]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Conditional constant propagation pass for the GNU compiler. |
f1717362 | 2 | Copyright (C) 2000-2016 Free Software Foundation, Inc. |
4ee9c684 | 3 | Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> |
4 | Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> | |
5 | ||
6 | This file is part of GCC. | |
48e1416a | 7 | |
4ee9c684 | 8 | GCC is free software; you can redistribute it and/or modify it |
9 | under the terms of the GNU General Public License as published by the | |
8c4c00c1 | 10 | Free Software Foundation; either version 3, or (at your option) any |
4ee9c684 | 11 | later version. |
48e1416a | 12 | |
4ee9c684 | 13 | GCC is distributed in the hope that it will be useful, but WITHOUT |
14 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
48e1416a | 17 | |
4ee9c684 | 18 | You should have received a copy of the GNU General Public License |
8c4c00c1 | 19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 21 | |
88dbf20f | 22 | /* Conditional constant propagation (CCP) is based on the SSA |
23 | propagation engine (tree-ssa-propagate.c). Constant assignments of | |
24 | the form VAR = CST are propagated from the assignments into uses of | |
25 | VAR, which in turn may generate new constants. The simulation uses | |
26 | a four level lattice to keep track of constant values associated | |
27 | with SSA names. Given an SSA name V_i, it may take one of the | |
28 | following values: | |
29 | ||
bfa30570 | 30 | UNINITIALIZED -> the initial state of the value. This value |
31 | is replaced with a correct initial value | |
32 | the first time the value is used, so the | |
33 | rest of the pass does not need to care about | |
34 | it. Using this value simplifies initialization | |
35 | of the pass, and prevents us from needlessly | |
36 | scanning statements that are never reached. | |
88dbf20f | 37 | |
38 | UNDEFINED -> V_i is a local variable whose definition | |
39 | has not been processed yet. Therefore we | |
40 | don't yet know if its value is a constant | |
41 | or not. | |
42 | ||
43 | CONSTANT -> V_i has been found to hold a constant | |
44 | value C. | |
45 | ||
46 | VARYING -> V_i cannot take a constant value, or if it | |
47 | does, it is not possible to determine it | |
48 | at compile time. | |
49 | ||
50 | The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node: | |
51 | ||
52 | 1- In ccp_visit_stmt, we are interested in assignments whose RHS | |
53 | evaluates into a constant and conditional jumps whose predicate | |
54 | evaluates into a boolean true or false. When an assignment of | |
55 | the form V_i = CONST is found, V_i's lattice value is set to | |
56 | CONSTANT and CONST is associated with it. This causes the | |
57 | propagation engine to add all the SSA edges coming out the | |
58 | assignment into the worklists, so that statements that use V_i | |
59 | can be visited. | |
60 | ||
61 | If the statement is a conditional with a constant predicate, we | |
62 | mark the outgoing edges as executable or not executable | |
63 | depending on the predicate's value. This is then used when | |
64 | visiting PHI nodes to know when a PHI argument can be ignored. | |
48e1416a | 65 | |
88dbf20f | 66 | |
67 | 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the | |
68 | same constant C, then the LHS of the PHI is set to C. This | |
69 | evaluation is known as the "meet operation". Since one of the | |
70 | goals of this evaluation is to optimistically return constant | |
71 | values as often as possible, it uses two main short cuts: | |
72 | ||
73 | - If an argument is flowing in through a non-executable edge, it | |
74 | is ignored. This is useful in cases like this: | |
75 | ||
76 | if (PRED) | |
77 | a_9 = 3; | |
78 | else | |
79 | a_10 = 100; | |
80 | a_11 = PHI (a_9, a_10) | |
81 | ||
82 | If PRED is known to always evaluate to false, then we can | |
83 | assume that a_11 will always take its value from a_10, meaning | |
84 | that instead of consider it VARYING (a_9 and a_10 have | |
85 | different values), we can consider it CONSTANT 100. | |
86 | ||
87 | - If an argument has an UNDEFINED value, then it does not affect | |
88 | the outcome of the meet operation. If a variable V_i has an | |
89 | UNDEFINED value, it means that either its defining statement | |
90 | hasn't been visited yet or V_i has no defining statement, in | |
91 | which case the original symbol 'V' is being used | |
92 | uninitialized. Since 'V' is a local variable, the compiler | |
93 | may assume any initial value for it. | |
94 | ||
95 | ||
96 | After propagation, every variable V_i that ends up with a lattice | |
97 | value of CONSTANT will have the associated constant value in the | |
98 | array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for | |
99 | final substitution and folding. | |
100 | ||
e913b5cd | 101 | This algorithm uses wide-ints at the max precision of the target. |
102 | This means that, with one uninteresting exception, variables with | |
103 | UNSIGNED types never go to VARYING because the bits above the | |
104 | precision of the type of the variable are always zero. The | |
105 | uninteresting case is a variable of UNSIGNED type that has the | |
106 | maximum precision of the target. Such variables can go to VARYING, | |
107 | but this causes no loss of infomation since these variables will | |
108 | never be extended. | |
109 | ||
4ee9c684 | 110 | References: |
111 | ||
112 | Constant propagation with conditional branches, | |
113 | Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | |
114 | ||
115 | Building an Optimizing Compiler, | |
116 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
117 | ||
118 | Advanced Compiler Design and Implementation, | |
119 | Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */ | |
120 | ||
121 | #include "config.h" | |
122 | #include "system.h" | |
123 | #include "coretypes.h" | |
9ef16211 | 124 | #include "backend.h" |
7c29e30e | 125 | #include "target.h" |
4ee9c684 | 126 | #include "tree.h" |
9ef16211 | 127 | #include "gimple.h" |
7c29e30e | 128 | #include "tree-pass.h" |
9ef16211 | 129 | #include "ssa.h" |
7c29e30e | 130 | #include "gimple-pretty-print.h" |
b20a8bb4 | 131 | #include "fold-const.h" |
bc61cadb | 132 | #include "gimple-fold.h" |
133 | #include "tree-eh.h" | |
a8783bee | 134 | #include "gimplify.h" |
dcf1a1ec | 135 | #include "gimple-iterator.h" |
073c1fd5 | 136 | #include "tree-cfg.h" |
41511585 | 137 | #include "tree-ssa-propagate.h" |
43fb76c1 | 138 | #include "dbgcnt.h" |
9a65cc0a | 139 | #include "params.h" |
f7715905 | 140 | #include "builtins.h" |
058a1b7a | 141 | #include "tree-chkp.h" |
e2588447 | 142 | #include "cfgloop.h" |
4ee9c684 | 143 | |
2dc10fae | 144 | |
4ee9c684 | 145 | /* Possible lattice values. */ |
146 | typedef enum | |
147 | { | |
bfa30570 | 148 | UNINITIALIZED, |
4ee9c684 | 149 | UNDEFINED, |
150 | CONSTANT, | |
151 | VARYING | |
88dbf20f | 152 | } ccp_lattice_t; |
4ee9c684 | 153 | |
9908fe4d | 154 | struct ccp_prop_value_t { |
14f101cf | 155 | /* Lattice value. */ |
156 | ccp_lattice_t lattice_val; | |
157 | ||
158 | /* Propagated value. */ | |
159 | tree value; | |
b7e55469 | 160 | |
e913b5cd | 161 | /* Mask that applies to the propagated value during CCP. For X |
162 | with a CONSTANT lattice value X & ~mask == value & ~mask. The | |
163 | zero bits in the mask cover constant values. The ones mean no | |
164 | information. */ | |
5de9d3ed | 165 | widest_int mask; |
14f101cf | 166 | }; |
167 | ||
88dbf20f | 168 | /* Array of propagated constant values. After propagation, |
169 | CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If | |
170 | the constant is held in an SSA name representing a memory store | |
4fb5e5ca | 171 | (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual |
172 | memory reference used to store (i.e., the LHS of the assignment | |
173 | doing the store). */ | |
9908fe4d | 174 | static ccp_prop_value_t *const_val; |
285df01b | 175 | static unsigned n_const_val; |
4ee9c684 | 176 | |
9908fe4d | 177 | static void canonicalize_value (ccp_prop_value_t *); |
6688f8ec | 178 | static bool ccp_fold_stmt (gimple_stmt_iterator *); |
27de3d43 | 179 | static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *); |
4af351a8 | 180 | |
88dbf20f | 181 | /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */ |
01406fc0 | 182 | |
183 | static void | |
9908fe4d | 184 | dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val) |
01406fc0 | 185 | { |
41511585 | 186 | switch (val.lattice_val) |
01406fc0 | 187 | { |
88dbf20f | 188 | case UNINITIALIZED: |
189 | fprintf (outf, "%sUNINITIALIZED", prefix); | |
190 | break; | |
41511585 | 191 | case UNDEFINED: |
192 | fprintf (outf, "%sUNDEFINED", prefix); | |
193 | break; | |
194 | case VARYING: | |
195 | fprintf (outf, "%sVARYING", prefix); | |
196 | break; | |
41511585 | 197 | case CONSTANT: |
b7e55469 | 198 | if (TREE_CODE (val.value) != INTEGER_CST |
796b6678 | 199 | || val.mask == 0) |
16ab4e97 | 200 | { |
201 | fprintf (outf, "%sCONSTANT ", prefix); | |
202 | print_generic_expr (outf, val.value, dump_flags); | |
203 | } | |
b7e55469 | 204 | else |
205 | { | |
28e557ef | 206 | widest_int cval = wi::bit_and_not (wi::to_widest (val.value), |
207 | val.mask); | |
e913b5cd | 208 | fprintf (outf, "%sCONSTANT ", prefix); |
209 | print_hex (cval, outf); | |
210 | fprintf (outf, " ("); | |
211 | print_hex (val.mask, outf); | |
212 | fprintf (outf, ")"); | |
b7e55469 | 213 | } |
41511585 | 214 | break; |
215 | default: | |
8c0963c4 | 216 | gcc_unreachable (); |
41511585 | 217 | } |
01406fc0 | 218 | } |
4ee9c684 | 219 | |
4ee9c684 | 220 | |
88dbf20f | 221 | /* Print lattice value VAL to stderr. */ |
222 | ||
9908fe4d | 223 | void debug_lattice_value (ccp_prop_value_t val); |
88dbf20f | 224 | |
4b987fac | 225 | DEBUG_FUNCTION void |
9908fe4d | 226 | debug_lattice_value (ccp_prop_value_t val) |
88dbf20f | 227 | { |
228 | dump_lattice_value (stderr, "", val); | |
229 | fprintf (stderr, "\n"); | |
230 | } | |
4ee9c684 | 231 | |
9c1be15e | 232 | /* Extend NONZERO_BITS to a full mask, with the upper bits being set. */ |
233 | ||
234 | static widest_int | |
235 | extend_mask (const wide_int &nonzero_bits) | |
236 | { | |
237 | return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true) | |
238 | | widest_int::from (nonzero_bits, UNSIGNED)); | |
239 | } | |
4ee9c684 | 240 | |
88dbf20f | 241 | /* Compute a default value for variable VAR and store it in the |
242 | CONST_VAL array. The following rules are used to get default | |
243 | values: | |
01406fc0 | 244 | |
88dbf20f | 245 | 1- Global and static variables that are declared constant are |
246 | considered CONSTANT. | |
247 | ||
248 | 2- Any other value is considered UNDEFINED. This is useful when | |
41511585 | 249 | considering PHI nodes. PHI arguments that are undefined do not |
250 | change the constant value of the PHI node, which allows for more | |
88dbf20f | 251 | constants to be propagated. |
4ee9c684 | 252 | |
8883e700 | 253 | 3- Variables defined by statements other than assignments and PHI |
88dbf20f | 254 | nodes are considered VARYING. |
4ee9c684 | 255 | |
8883e700 | 256 | 4- Initial values of variables that are not GIMPLE registers are |
bfa30570 | 257 | considered VARYING. */ |
4ee9c684 | 258 | |
9908fe4d | 259 | static ccp_prop_value_t |
88dbf20f | 260 | get_default_value (tree var) |
261 | { | |
9908fe4d | 262 | ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 }; |
42acab1c | 263 | gimple *stmt; |
8edeb88b | 264 | |
265 | stmt = SSA_NAME_DEF_STMT (var); | |
266 | ||
267 | if (gimple_nop_p (stmt)) | |
4ee9c684 | 268 | { |
8edeb88b | 269 | /* Variables defined by an empty statement are those used |
270 | before being initialized. If VAR is a local variable, we | |
271 | can assume initially that it is UNDEFINED, otherwise we must | |
272 | consider it VARYING. */ | |
7c782c9b | 273 | if (!virtual_operand_p (var) |
274 | && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL) | |
8edeb88b | 275 | val.lattice_val = UNDEFINED; |
276 | else | |
b7e55469 | 277 | { |
278 | val.lattice_val = VARYING; | |
e913b5cd | 279 | val.mask = -1; |
fc08b993 | 280 | if (flag_tree_bit_ccp) |
281 | { | |
9c1be15e | 282 | wide_int nonzero_bits = get_nonzero_bits (var); |
283 | if (nonzero_bits != -1) | |
fc08b993 | 284 | { |
285 | val.lattice_val = CONSTANT; | |
286 | val.value = build_zero_cst (TREE_TYPE (var)); | |
9c1be15e | 287 | val.mask = extend_mask (nonzero_bits); |
fc08b993 | 288 | } |
289 | } | |
b7e55469 | 290 | } |
4ee9c684 | 291 | } |
b45b214a | 292 | else if (is_gimple_assign (stmt)) |
41511585 | 293 | { |
8edeb88b | 294 | tree cst; |
295 | if (gimple_assign_single_p (stmt) | |
296 | && DECL_P (gimple_assign_rhs1 (stmt)) | |
297 | && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt)))) | |
88dbf20f | 298 | { |
8edeb88b | 299 | val.lattice_val = CONSTANT; |
300 | val.value = cst; | |
88dbf20f | 301 | } |
302 | else | |
b45b214a | 303 | { |
304 | /* Any other variable defined by an assignment is considered | |
305 | UNDEFINED. */ | |
306 | val.lattice_val = UNDEFINED; | |
307 | } | |
308 | } | |
309 | else if ((is_gimple_call (stmt) | |
310 | && gimple_call_lhs (stmt) != NULL_TREE) | |
311 | || gimple_code (stmt) == GIMPLE_PHI) | |
312 | { | |
313 | /* A variable defined by a call or a PHI node is considered | |
314 | UNDEFINED. */ | |
315 | val.lattice_val = UNDEFINED; | |
8edeb88b | 316 | } |
317 | else | |
318 | { | |
319 | /* Otherwise, VAR will never take on a constant value. */ | |
320 | val.lattice_val = VARYING; | |
e913b5cd | 321 | val.mask = -1; |
41511585 | 322 | } |
4ee9c684 | 323 | |
41511585 | 324 | return val; |
325 | } | |
4ee9c684 | 326 | |
4ee9c684 | 327 | |
bfa30570 | 328 | /* Get the constant value associated with variable VAR. */ |
4ee9c684 | 329 | |
9908fe4d | 330 | static inline ccp_prop_value_t * |
bfa30570 | 331 | get_value (tree var) |
88dbf20f | 332 | { |
9908fe4d | 333 | ccp_prop_value_t *val; |
bfa30570 | 334 | |
285df01b | 335 | if (const_val == NULL |
336 | || SSA_NAME_VERSION (var) >= n_const_val) | |
e004838d | 337 | return NULL; |
338 | ||
339 | val = &const_val[SSA_NAME_VERSION (var)]; | |
bfa30570 | 340 | if (val->lattice_val == UNINITIALIZED) |
4ee9c684 | 341 | *val = get_default_value (var); |
342 | ||
f5faab84 | 343 | canonicalize_value (val); |
4af351a8 | 344 | |
4ee9c684 | 345 | return val; |
346 | } | |
347 | ||
15d138c9 | 348 | /* Return the constant tree value associated with VAR. */ |
349 | ||
350 | static inline tree | |
351 | get_constant_value (tree var) | |
352 | { | |
9908fe4d | 353 | ccp_prop_value_t *val; |
98d92e3c | 354 | if (TREE_CODE (var) != SSA_NAME) |
355 | { | |
356 | if (is_gimple_min_invariant (var)) | |
357 | return var; | |
358 | return NULL_TREE; | |
359 | } | |
360 | val = get_value (var); | |
b7e55469 | 361 | if (val |
362 | && val->lattice_val == CONSTANT | |
363 | && (TREE_CODE (val->value) != INTEGER_CST | |
796b6678 | 364 | || val->mask == 0)) |
15d138c9 | 365 | return val->value; |
366 | return NULL_TREE; | |
367 | } | |
368 | ||
bfa30570 | 369 | /* Sets the value associated with VAR to VARYING. */ |
370 | ||
371 | static inline void | |
372 | set_value_varying (tree var) | |
373 | { | |
9908fe4d | 374 | ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)]; |
bfa30570 | 375 | |
376 | val->lattice_val = VARYING; | |
377 | val->value = NULL_TREE; | |
e913b5cd | 378 | val->mask = -1; |
bfa30570 | 379 | } |
4ee9c684 | 380 | |
0001b944 | 381 | /* For integer constants, make sure to drop TREE_OVERFLOW. */ |
b31eb493 | 382 | |
383 | static void | |
9908fe4d | 384 | canonicalize_value (ccp_prop_value_t *val) |
b31eb493 | 385 | { |
f5faab84 | 386 | if (val->lattice_val != CONSTANT) |
387 | return; | |
388 | ||
389 | if (TREE_OVERFLOW_P (val->value)) | |
390 | val->value = drop_tree_overflow (val->value); | |
b31eb493 | 391 | } |
392 | ||
b7e55469 | 393 | /* Return whether the lattice transition is valid. */ |
394 | ||
395 | static bool | |
9908fe4d | 396 | valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val) |
b7e55469 | 397 | { |
398 | /* Lattice transitions must always be monotonically increasing in | |
399 | value. */ | |
400 | if (old_val.lattice_val < new_val.lattice_val) | |
401 | return true; | |
402 | ||
403 | if (old_val.lattice_val != new_val.lattice_val) | |
404 | return false; | |
405 | ||
406 | if (!old_val.value && !new_val.value) | |
407 | return true; | |
408 | ||
409 | /* Now both lattice values are CONSTANT. */ | |
410 | ||
fc6cc27b | 411 | /* Allow arbitrary copy changes as we might look through PHI <a_1, ...> |
412 | when only a single copy edge is executable. */ | |
413 | if (TREE_CODE (old_val.value) == SSA_NAME | |
414 | && TREE_CODE (new_val.value) == SSA_NAME) | |
415 | return true; | |
416 | ||
417 | /* Allow transitioning from a constant to a copy. */ | |
418 | if (is_gimple_min_invariant (old_val.value) | |
419 | && TREE_CODE (new_val.value) == SSA_NAME) | |
420 | return true; | |
421 | ||
43c92e0a | 422 | /* Allow transitioning from PHI <&x, not executable> == &x |
423 | to PHI <&x, &y> == common alignment. */ | |
b7e55469 | 424 | if (TREE_CODE (old_val.value) != INTEGER_CST |
425 | && TREE_CODE (new_val.value) == INTEGER_CST) | |
426 | return true; | |
427 | ||
428 | /* Bit-lattices have to agree in the still valid bits. */ | |
429 | if (TREE_CODE (old_val.value) == INTEGER_CST | |
430 | && TREE_CODE (new_val.value) == INTEGER_CST) | |
5de9d3ed | 431 | return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask) |
432 | == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask)); | |
b7e55469 | 433 | |
434 | /* Otherwise constant values have to agree. */ | |
0001b944 | 435 | if (operand_equal_p (old_val.value, new_val.value, 0)) |
436 | return true; | |
437 | ||
438 | /* At least the kinds and types should agree now. */ | |
439 | if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value) | |
440 | || !types_compatible_p (TREE_TYPE (old_val.value), | |
441 | TREE_TYPE (new_val.value))) | |
442 | return false; | |
443 | ||
444 | /* For floats and !HONOR_NANS allow transitions from (partial) NaN | |
445 | to non-NaN. */ | |
446 | tree type = TREE_TYPE (new_val.value); | |
447 | if (SCALAR_FLOAT_TYPE_P (type) | |
93633022 | 448 | && !HONOR_NANS (type)) |
0001b944 | 449 | { |
450 | if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value))) | |
451 | return true; | |
452 | } | |
453 | else if (VECTOR_FLOAT_TYPE_P (type) | |
93633022 | 454 | && !HONOR_NANS (type)) |
0001b944 | 455 | { |
456 | for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i) | |
457 | if (!REAL_VALUE_ISNAN | |
458 | (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i))) | |
459 | && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i), | |
460 | VECTOR_CST_ELT (new_val.value, i), 0)) | |
461 | return false; | |
462 | return true; | |
463 | } | |
464 | else if (COMPLEX_FLOAT_TYPE_P (type) | |
93633022 | 465 | && !HONOR_NANS (type)) |
0001b944 | 466 | { |
467 | if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value))) | |
468 | && !operand_equal_p (TREE_REALPART (old_val.value), | |
469 | TREE_REALPART (new_val.value), 0)) | |
470 | return false; | |
471 | if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value))) | |
472 | && !operand_equal_p (TREE_IMAGPART (old_val.value), | |
473 | TREE_IMAGPART (new_val.value), 0)) | |
474 | return false; | |
475 | return true; | |
476 | } | |
477 | return false; | |
b7e55469 | 478 | } |
479 | ||
88dbf20f | 480 | /* Set the value for variable VAR to NEW_VAL. Return true if the new |
481 | value is different from VAR's previous value. */ | |
4ee9c684 | 482 | |
41511585 | 483 | static bool |
27de3d43 | 484 | set_lattice_value (tree var, ccp_prop_value_t *new_val) |
4ee9c684 | 485 | { |
6d0bf6d6 | 486 | /* We can deal with old UNINITIALIZED values just fine here. */ |
9908fe4d | 487 | ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)]; |
88dbf20f | 488 | |
27de3d43 | 489 | canonicalize_value (new_val); |
b31eb493 | 490 | |
b7e55469 | 491 | /* We have to be careful to not go up the bitwise lattice |
27de3d43 | 492 | represented by the mask. Instead of dropping to VARYING |
493 | use the meet operator to retain a conservative value. | |
494 | Missed optimizations like PR65851 makes this necessary. | |
495 | It also ensures we converge to a stable lattice solution. */ | |
496 | if (new_val->lattice_val == CONSTANT | |
b7e55469 | 497 | && old_val->lattice_val == CONSTANT |
27de3d43 | 498 | && TREE_CODE (new_val->value) != SSA_NAME) |
499 | ccp_lattice_meet (new_val, old_val); | |
bfa30570 | 500 | |
27de3d43 | 501 | gcc_checking_assert (valid_lattice_transition (*old_val, *new_val)); |
88dbf20f | 502 | |
b7e55469 | 503 | /* If *OLD_VAL and NEW_VAL are the same, return false to inform the |
504 | caller that this was a non-transition. */ | |
27de3d43 | 505 | if (old_val->lattice_val != new_val->lattice_val |
506 | || (new_val->lattice_val == CONSTANT | |
507 | && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value) | |
508 | || (TREE_CODE (new_val->value) == INTEGER_CST | |
509 | && (new_val->mask != old_val->mask | |
bc1d3d97 | 510 | || (wi::bit_and_not (wi::to_widest (old_val->value), |
27de3d43 | 511 | new_val->mask) |
512 | != wi::bit_and_not (wi::to_widest (new_val->value), | |
513 | new_val->mask)))) | |
514 | || (TREE_CODE (new_val->value) != INTEGER_CST | |
515 | && !operand_equal_p (new_val->value, old_val->value, 0))))) | |
4ee9c684 | 516 | { |
b7e55469 | 517 | /* ??? We would like to delay creation of INTEGER_CSTs from |
518 | partially constants here. */ | |
519 | ||
41511585 | 520 | if (dump_file && (dump_flags & TDF_DETAILS)) |
521 | { | |
27de3d43 | 522 | dump_lattice_value (dump_file, "Lattice value changed to ", *new_val); |
bfa30570 | 523 | fprintf (dump_file, ". Adding SSA edges to worklist.\n"); |
41511585 | 524 | } |
525 | ||
27de3d43 | 526 | *old_val = *new_val; |
88dbf20f | 527 | |
27de3d43 | 528 | gcc_assert (new_val->lattice_val != UNINITIALIZED); |
bfa30570 | 529 | return true; |
4ee9c684 | 530 | } |
41511585 | 531 | |
532 | return false; | |
4ee9c684 | 533 | } |
534 | ||
9908fe4d | 535 | static ccp_prop_value_t get_value_for_expr (tree, bool); |
536 | static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree); | |
5de9d3ed | 537 | static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *, |
10c3fe8d | 538 | tree, const widest_int &, const widest_int &, |
539 | tree, const widest_int &, const widest_int &); | |
b7e55469 | 540 | |
5de9d3ed | 541 | /* Return a widest_int that can be used for bitwise simplifications |
b7e55469 | 542 | from VAL. */ |
543 | ||
5de9d3ed | 544 | static widest_int |
9908fe4d | 545 | value_to_wide_int (ccp_prop_value_t val) |
b7e55469 | 546 | { |
547 | if (val.value | |
548 | && TREE_CODE (val.value) == INTEGER_CST) | |
5de9d3ed | 549 | return wi::to_widest (val.value); |
e913b5cd | 550 | |
551 | return 0; | |
b7e55469 | 552 | } |
553 | ||
554 | /* Return the value for the address expression EXPR based on alignment | |
555 | information. */ | |
6d0bf6d6 | 556 | |
9908fe4d | 557 | static ccp_prop_value_t |
b7e55469 | 558 | get_value_from_alignment (tree expr) |
559 | { | |
f8abb542 | 560 | tree type = TREE_TYPE (expr); |
9908fe4d | 561 | ccp_prop_value_t val; |
f8abb542 | 562 | unsigned HOST_WIDE_INT bitpos; |
563 | unsigned int align; | |
b7e55469 | 564 | |
565 | gcc_assert (TREE_CODE (expr) == ADDR_EXPR); | |
566 | ||
59da1bcd | 567 | get_pointer_alignment_1 (expr, &align, &bitpos); |
cf8f0e63 | 568 | val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type) |
5de9d3ed | 569 | ? wi::mask <widest_int> (TYPE_PRECISION (type), false) |
e913b5cd | 570 | : -1).and_not (align / BITS_PER_UNIT - 1); |
c90d2c17 | 571 | val.lattice_val |
572 | = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT; | |
f8abb542 | 573 | if (val.lattice_val == CONSTANT) |
796b6678 | 574 | val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT); |
b7e55469 | 575 | else |
f8abb542 | 576 | val.value = NULL_TREE; |
b7e55469 | 577 | |
578 | return val; | |
579 | } | |
580 | ||
581 | /* Return the value for the tree operand EXPR. If FOR_BITS_P is true | |
582 | return constant bits extracted from alignment information for | |
583 | invariant addresses. */ | |
584 | ||
9908fe4d | 585 | static ccp_prop_value_t |
b7e55469 | 586 | get_value_for_expr (tree expr, bool for_bits_p) |
6d0bf6d6 | 587 | { |
9908fe4d | 588 | ccp_prop_value_t val; |
6d0bf6d6 | 589 | |
590 | if (TREE_CODE (expr) == SSA_NAME) | |
b7e55469 | 591 | { |
592 | val = *get_value (expr); | |
593 | if (for_bits_p | |
594 | && val.lattice_val == CONSTANT | |
595 | && TREE_CODE (val.value) == ADDR_EXPR) | |
596 | val = get_value_from_alignment (val.value); | |
bc1d3d97 | 597 | /* Fall back to a copy value. */ |
598 | if (!for_bits_p | |
599 | && val.lattice_val == VARYING | |
600 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr)) | |
601 | { | |
602 | val.lattice_val = CONSTANT; | |
603 | val.value = expr; | |
604 | val.mask = -1; | |
605 | } | |
b7e55469 | 606 | } |
607 | else if (is_gimple_min_invariant (expr) | |
608 | && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR)) | |
6d0bf6d6 | 609 | { |
610 | val.lattice_val = CONSTANT; | |
611 | val.value = expr; | |
e913b5cd | 612 | val.mask = 0; |
f5faab84 | 613 | canonicalize_value (&val); |
6d0bf6d6 | 614 | } |
b7e55469 | 615 | else if (TREE_CODE (expr) == ADDR_EXPR) |
616 | val = get_value_from_alignment (expr); | |
6d0bf6d6 | 617 | else |
618 | { | |
619 | val.lattice_val = VARYING; | |
09b2913a | 620 | val.mask = -1; |
6d0bf6d6 | 621 | val.value = NULL_TREE; |
622 | } | |
911a6ef1 | 623 | |
624 | if (val.lattice_val == VARYING | |
625 | && TYPE_UNSIGNED (TREE_TYPE (expr))) | |
626 | val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr))); | |
627 | ||
6d0bf6d6 | 628 | return val; |
629 | } | |
630 | ||
88dbf20f | 631 | /* Return the likely CCP lattice value for STMT. |
4ee9c684 | 632 | |
41511585 | 633 | If STMT has no operands, then return CONSTANT. |
4ee9c684 | 634 | |
d61b9af3 | 635 | Else if undefinedness of operands of STMT cause its value to be |
636 | undefined, then return UNDEFINED. | |
4ee9c684 | 637 | |
41511585 | 638 | Else if any operands of STMT are constants, then return CONSTANT. |
4ee9c684 | 639 | |
41511585 | 640 | Else return VARYING. */ |
4ee9c684 | 641 | |
88dbf20f | 642 | static ccp_lattice_t |
42acab1c | 643 | likely_value (gimple *stmt) |
41511585 | 644 | { |
d61b9af3 | 645 | bool has_constant_operand, has_undefined_operand, all_undefined_operands; |
a8a0d56e | 646 | bool has_nsa_operand; |
41511585 | 647 | tree use; |
648 | ssa_op_iter iter; | |
8edeb88b | 649 | unsigned i; |
4ee9c684 | 650 | |
590c3166 | 651 | enum gimple_code code = gimple_code (stmt); |
75a70cf9 | 652 | |
653 | /* This function appears to be called only for assignments, calls, | |
654 | conditionals, and switches, due to the logic in visit_stmt. */ | |
655 | gcc_assert (code == GIMPLE_ASSIGN | |
656 | || code == GIMPLE_CALL | |
657 | || code == GIMPLE_COND | |
658 | || code == GIMPLE_SWITCH); | |
88dbf20f | 659 | |
660 | /* If the statement has volatile operands, it won't fold to a | |
661 | constant value. */ | |
75a70cf9 | 662 | if (gimple_has_volatile_ops (stmt)) |
88dbf20f | 663 | return VARYING; |
664 | ||
75a70cf9 | 665 | /* Arrive here for more complex cases. */ |
bfa30570 | 666 | has_constant_operand = false; |
d61b9af3 | 667 | has_undefined_operand = false; |
668 | all_undefined_operands = true; | |
a8a0d56e | 669 | has_nsa_operand = false; |
8edeb88b | 670 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE) |
41511585 | 671 | { |
9908fe4d | 672 | ccp_prop_value_t *val = get_value (use); |
41511585 | 673 | |
bfa30570 | 674 | if (val->lattice_val == UNDEFINED) |
d61b9af3 | 675 | has_undefined_operand = true; |
676 | else | |
677 | all_undefined_operands = false; | |
88dbf20f | 678 | |
41511585 | 679 | if (val->lattice_val == CONSTANT) |
bfa30570 | 680 | has_constant_operand = true; |
a8a0d56e | 681 | |
682 | if (SSA_NAME_IS_DEFAULT_DEF (use) | |
683 | || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use))) | |
684 | has_nsa_operand = true; | |
4ee9c684 | 685 | } |
41511585 | 686 | |
dd277d48 | 687 | /* There may be constants in regular rhs operands. For calls we |
688 | have to ignore lhs, fndecl and static chain, otherwise only | |
689 | the lhs. */ | |
690 | for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt); | |
8edeb88b | 691 | i < gimple_num_ops (stmt); ++i) |
692 | { | |
693 | tree op = gimple_op (stmt, i); | |
694 | if (!op || TREE_CODE (op) == SSA_NAME) | |
695 | continue; | |
696 | if (is_gimple_min_invariant (op)) | |
697 | has_constant_operand = true; | |
698 | } | |
699 | ||
87c0a9fc | 700 | if (has_constant_operand) |
701 | all_undefined_operands = false; | |
702 | ||
3d483a94 | 703 | if (has_undefined_operand |
704 | && code == GIMPLE_CALL | |
705 | && gimple_call_internal_p (stmt)) | |
706 | switch (gimple_call_internal_fn (stmt)) | |
707 | { | |
708 | /* These 3 builtins use the first argument just as a magic | |
709 | way how to find out a decl uid. */ | |
710 | case IFN_GOMP_SIMD_LANE: | |
711 | case IFN_GOMP_SIMD_VF: | |
712 | case IFN_GOMP_SIMD_LAST_LANE: | |
713 | has_undefined_operand = false; | |
714 | break; | |
715 | default: | |
716 | break; | |
717 | } | |
718 | ||
d61b9af3 | 719 | /* If the operation combines operands like COMPLEX_EXPR make sure to |
720 | not mark the result UNDEFINED if only one part of the result is | |
721 | undefined. */ | |
75a70cf9 | 722 | if (has_undefined_operand && all_undefined_operands) |
d61b9af3 | 723 | return UNDEFINED; |
75a70cf9 | 724 | else if (code == GIMPLE_ASSIGN && has_undefined_operand) |
d61b9af3 | 725 | { |
75a70cf9 | 726 | switch (gimple_assign_rhs_code (stmt)) |
d61b9af3 | 727 | { |
728 | /* Unary operators are handled with all_undefined_operands. */ | |
729 | case PLUS_EXPR: | |
730 | case MINUS_EXPR: | |
d61b9af3 | 731 | case POINTER_PLUS_EXPR: |
d61b9af3 | 732 | /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected. |
733 | Not bitwise operators, one VARYING operand may specify the | |
734 | result completely. Not logical operators for the same reason. | |
05a936a0 | 735 | Not COMPLEX_EXPR as one VARYING operand makes the result partly |
736 | not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because | |
737 | the undefined operand may be promoted. */ | |
d61b9af3 | 738 | return UNDEFINED; |
739 | ||
43c92e0a | 740 | case ADDR_EXPR: |
741 | /* If any part of an address is UNDEFINED, like the index | |
742 | of an ARRAY_EXPR, then treat the result as UNDEFINED. */ | |
743 | return UNDEFINED; | |
744 | ||
d61b9af3 | 745 | default: |
746 | ; | |
747 | } | |
748 | } | |
749 | /* If there was an UNDEFINED operand but the result may be not UNDEFINED | |
c91fedc5 | 750 | fall back to CONSTANT. During iteration UNDEFINED may still drop |
751 | to CONSTANT. */ | |
d61b9af3 | 752 | if (has_undefined_operand) |
c91fedc5 | 753 | return CONSTANT; |
d61b9af3 | 754 | |
8edeb88b | 755 | /* We do not consider virtual operands here -- load from read-only |
756 | memory may have only VARYING virtual operands, but still be | |
a8a0d56e | 757 | constant. Also we can combine the stmt with definitions from |
758 | operands whose definitions are not simulated again. */ | |
bfa30570 | 759 | if (has_constant_operand |
a8a0d56e | 760 | || has_nsa_operand |
8edeb88b | 761 | || gimple_references_memory_p (stmt)) |
88dbf20f | 762 | return CONSTANT; |
763 | ||
bfa30570 | 764 | return VARYING; |
4ee9c684 | 765 | } |
766 | ||
bfa30570 | 767 | /* Returns true if STMT cannot be constant. */ |
768 | ||
769 | static bool | |
42acab1c | 770 | surely_varying_stmt_p (gimple *stmt) |
bfa30570 | 771 | { |
772 | /* If the statement has operands that we cannot handle, it cannot be | |
773 | constant. */ | |
75a70cf9 | 774 | if (gimple_has_volatile_ops (stmt)) |
bfa30570 | 775 | return true; |
776 | ||
f257af64 | 777 | /* If it is a call and does not return a value or is not a |
237e78b1 | 778 | builtin and not an indirect call or a call to function with |
779 | assume_aligned/alloc_align attribute, it is varying. */ | |
75a70cf9 | 780 | if (is_gimple_call (stmt)) |
f257af64 | 781 | { |
237e78b1 | 782 | tree fndecl, fntype = gimple_call_fntype (stmt); |
f257af64 | 783 | if (!gimple_call_lhs (stmt) |
784 | || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE | |
237e78b1 | 785 | && !DECL_BUILT_IN (fndecl) |
786 | && !lookup_attribute ("assume_aligned", | |
787 | TYPE_ATTRIBUTES (fntype)) | |
788 | && !lookup_attribute ("alloc_align", | |
789 | TYPE_ATTRIBUTES (fntype)))) | |
f257af64 | 790 | return true; |
791 | } | |
bfa30570 | 792 | |
8edeb88b | 793 | /* Any other store operation is not interesting. */ |
dd277d48 | 794 | else if (gimple_vdef (stmt)) |
8edeb88b | 795 | return true; |
796 | ||
bfa30570 | 797 | /* Anything other than assignments and conditional jumps are not |
798 | interesting for CCP. */ | |
75a70cf9 | 799 | if (gimple_code (stmt) != GIMPLE_ASSIGN |
f257af64 | 800 | && gimple_code (stmt) != GIMPLE_COND |
801 | && gimple_code (stmt) != GIMPLE_SWITCH | |
802 | && gimple_code (stmt) != GIMPLE_CALL) | |
bfa30570 | 803 | return true; |
804 | ||
805 | return false; | |
806 | } | |
4ee9c684 | 807 | |
41511585 | 808 | /* Initialize local data structures for CCP. */ |
4ee9c684 | 809 | |
810 | static void | |
41511585 | 811 | ccp_initialize (void) |
4ee9c684 | 812 | { |
41511585 | 813 | basic_block bb; |
4ee9c684 | 814 | |
285df01b | 815 | n_const_val = num_ssa_names; |
9908fe4d | 816 | const_val = XCNEWVEC (ccp_prop_value_t, n_const_val); |
4ee9c684 | 817 | |
41511585 | 818 | /* Initialize simulation flags for PHI nodes and statements. */ |
fc00614f | 819 | FOR_EACH_BB_FN (bb, cfun) |
4ee9c684 | 820 | { |
75a70cf9 | 821 | gimple_stmt_iterator i; |
4ee9c684 | 822 | |
75a70cf9 | 823 | for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) |
41511585 | 824 | { |
42acab1c | 825 | gimple *stmt = gsi_stmt (i); |
2193544e | 826 | bool is_varying; |
827 | ||
828 | /* If the statement is a control insn, then we do not | |
829 | want to avoid simulating the statement once. Failure | |
830 | to do so means that those edges will never get added. */ | |
831 | if (stmt_ends_bb_p (stmt)) | |
832 | is_varying = false; | |
833 | else | |
834 | is_varying = surely_varying_stmt_p (stmt); | |
4ee9c684 | 835 | |
bfa30570 | 836 | if (is_varying) |
41511585 | 837 | { |
88dbf20f | 838 | tree def; |
839 | ssa_op_iter iter; | |
840 | ||
841 | /* If the statement will not produce a constant, mark | |
842 | all its outputs VARYING. */ | |
843 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) | |
8edeb88b | 844 | set_value_varying (def); |
41511585 | 845 | } |
75a70cf9 | 846 | prop_set_simulate_again (stmt, !is_varying); |
41511585 | 847 | } |
4ee9c684 | 848 | } |
849 | ||
75a70cf9 | 850 | /* Now process PHI nodes. We never clear the simulate_again flag on |
851 | phi nodes, since we do not know which edges are executable yet, | |
852 | except for phi nodes for virtual operands when we do not do store ccp. */ | |
fc00614f | 853 | FOR_EACH_BB_FN (bb, cfun) |
4ee9c684 | 854 | { |
1a91d914 | 855 | gphi_iterator i; |
41511585 | 856 | |
75a70cf9 | 857 | for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i)) |
858 | { | |
1a91d914 | 859 | gphi *phi = i.phi (); |
75a70cf9 | 860 | |
7c782c9b | 861 | if (virtual_operand_p (gimple_phi_result (phi))) |
75a70cf9 | 862 | prop_set_simulate_again (phi, false); |
bfa30570 | 863 | else |
75a70cf9 | 864 | prop_set_simulate_again (phi, true); |
41511585 | 865 | } |
4ee9c684 | 866 | } |
41511585 | 867 | } |
4ee9c684 | 868 | |
43fb76c1 | 869 | /* Debug count support. Reset the values of ssa names |
870 | VARYING when the total number ssa names analyzed is | |
871 | beyond the debug count specified. */ | |
872 | ||
873 | static void | |
874 | do_dbg_cnt (void) | |
875 | { | |
876 | unsigned i; | |
877 | for (i = 0; i < num_ssa_names; i++) | |
878 | { | |
879 | if (!dbg_cnt (ccp)) | |
880 | { | |
881 | const_val[i].lattice_val = VARYING; | |
e913b5cd | 882 | const_val[i].mask = -1; |
43fb76c1 | 883 | const_val[i].value = NULL_TREE; |
884 | } | |
885 | } | |
886 | } | |
887 | ||
4ee9c684 | 888 | |
88dbf20f | 889 | /* Do final substitution of propagated values, cleanup the flowgraph and |
d0322b7d | 890 | free allocated storage. If NONZERO_P, record nonzero bits. |
4ee9c684 | 891 | |
33a34f1e | 892 | Return TRUE when something was optimized. */ |
893 | ||
894 | static bool | |
d0322b7d | 895 | ccp_finalize (bool nonzero_p) |
4ee9c684 | 896 | { |
43fb76c1 | 897 | bool something_changed; |
153c3b50 | 898 | unsigned i; |
43fb76c1 | 899 | |
900 | do_dbg_cnt (); | |
153c3b50 | 901 | |
902 | /* Derive alignment and misalignment information from partially | |
fc08b993 | 903 | constant pointers in the lattice or nonzero bits from partially |
904 | constant integers. */ | |
153c3b50 | 905 | for (i = 1; i < num_ssa_names; ++i) |
906 | { | |
907 | tree name = ssa_name (i); | |
9908fe4d | 908 | ccp_prop_value_t *val; |
153c3b50 | 909 | unsigned int tem, align; |
910 | ||
911 | if (!name | |
fc08b993 | 912 | || (!POINTER_TYPE_P (TREE_TYPE (name)) |
913 | && (!INTEGRAL_TYPE_P (TREE_TYPE (name)) | |
914 | /* Don't record nonzero bits before IPA to avoid | |
915 | using too much memory. */ | |
d0322b7d | 916 | || !nonzero_p))) |
153c3b50 | 917 | continue; |
918 | ||
919 | val = get_value (name); | |
920 | if (val->lattice_val != CONSTANT | |
921 | || TREE_CODE (val->value) != INTEGER_CST) | |
922 | continue; | |
923 | ||
fc08b993 | 924 | if (POINTER_TYPE_P (TREE_TYPE (name))) |
925 | { | |
926 | /* Trailing mask bits specify the alignment, trailing value | |
927 | bits the misalignment. */ | |
aeb682a2 | 928 | tem = val->mask.to_uhwi (); |
fc08b993 | 929 | align = (tem & -tem); |
930 | if (align > 1) | |
931 | set_ptr_info_alignment (get_ptr_info (name), align, | |
f9ae6f95 | 932 | (TREE_INT_CST_LOW (val->value) |
fc08b993 | 933 | & (align - 1))); |
934 | } | |
935 | else | |
936 | { | |
9c1be15e | 937 | unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value)); |
938 | wide_int nonzero_bits = wide_int::from (val->mask, precision, | |
939 | UNSIGNED) | val->value; | |
fc08b993 | 940 | nonzero_bits &= get_nonzero_bits (name); |
941 | set_nonzero_bits (name, nonzero_bits); | |
942 | } | |
153c3b50 | 943 | } |
944 | ||
88dbf20f | 945 | /* Perform substitutions based on the known constant values. */ |
14f101cf | 946 | something_changed = substitute_and_fold (get_constant_value, |
947 | ccp_fold_stmt, true); | |
4ee9c684 | 948 | |
88dbf20f | 949 | free (const_val); |
e004838d | 950 | const_val = NULL; |
33a34f1e | 951 | return something_changed;; |
4ee9c684 | 952 | } |
953 | ||
954 | ||
88dbf20f | 955 | /* Compute the meet operator between *VAL1 and *VAL2. Store the result |
956 | in VAL1. | |
957 | ||
958 | any M UNDEFINED = any | |
88dbf20f | 959 | any M VARYING = VARYING |
960 | Ci M Cj = Ci if (i == j) | |
961 | Ci M Cj = VARYING if (i != j) | |
bfa30570 | 962 | */ |
4ee9c684 | 963 | |
964 | static void | |
27de3d43 | 965 | ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2) |
4ee9c684 | 966 | { |
fc6cc27b | 967 | if (val1->lattice_val == UNDEFINED |
968 | /* For UNDEFINED M SSA we can't always SSA because its definition | |
969 | may not dominate the PHI node. Doing optimistic copy propagation | |
970 | also causes a lot of gcc.dg/uninit-pred*.c FAILs. */ | |
971 | && (val2->lattice_val != CONSTANT | |
972 | || TREE_CODE (val2->value) != SSA_NAME)) | |
4ee9c684 | 973 | { |
88dbf20f | 974 | /* UNDEFINED M any = any */ |
975 | *val1 = *val2; | |
41511585 | 976 | } |
fc6cc27b | 977 | else if (val2->lattice_val == UNDEFINED |
978 | /* See above. */ | |
979 | && (val1->lattice_val != CONSTANT | |
980 | || TREE_CODE (val1->value) != SSA_NAME)) | |
92481a4d | 981 | { |
88dbf20f | 982 | /* any M UNDEFINED = any |
983 | Nothing to do. VAL1 already contains the value we want. */ | |
984 | ; | |
92481a4d | 985 | } |
88dbf20f | 986 | else if (val1->lattice_val == VARYING |
987 | || val2->lattice_val == VARYING) | |
41511585 | 988 | { |
88dbf20f | 989 | /* any M VARYING = VARYING. */ |
990 | val1->lattice_val = VARYING; | |
e913b5cd | 991 | val1->mask = -1; |
88dbf20f | 992 | val1->value = NULL_TREE; |
41511585 | 993 | } |
b7e55469 | 994 | else if (val1->lattice_val == CONSTANT |
995 | && val2->lattice_val == CONSTANT | |
996 | && TREE_CODE (val1->value) == INTEGER_CST | |
997 | && TREE_CODE (val2->value) == INTEGER_CST) | |
998 | { | |
999 | /* Ci M Cj = Ci if (i == j) | |
1000 | Ci M Cj = VARYING if (i != j) | |
1001 | ||
1002 | For INTEGER_CSTs mask unequal bits. If no equal bits remain, | |
1003 | drop to varying. */ | |
e913b5cd | 1004 | val1->mask = (val1->mask | val2->mask |
5de9d3ed | 1005 | | (wi::to_widest (val1->value) |
1006 | ^ wi::to_widest (val2->value))); | |
c90d2c17 | 1007 | if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1) |
b7e55469 | 1008 | { |
1009 | val1->lattice_val = VARYING; | |
1010 | val1->value = NULL_TREE; | |
1011 | } | |
1012 | } | |
88dbf20f | 1013 | else if (val1->lattice_val == CONSTANT |
1014 | && val2->lattice_val == CONSTANT | |
27de3d43 | 1015 | && operand_equal_p (val1->value, val2->value, 0)) |
41511585 | 1016 | { |
88dbf20f | 1017 | /* Ci M Cj = Ci if (i == j) |
1018 | Ci M Cj = VARYING if (i != j) | |
1019 | ||
b7e55469 | 1020 | VAL1 already contains the value we want for equivalent values. */ |
1021 | } | |
1022 | else if (val1->lattice_val == CONSTANT | |
1023 | && val2->lattice_val == CONSTANT | |
1024 | && (TREE_CODE (val1->value) == ADDR_EXPR | |
1025 | || TREE_CODE (val2->value) == ADDR_EXPR)) | |
1026 | { | |
1027 | /* When not equal addresses are involved try meeting for | |
1028 | alignment. */ | |
9908fe4d | 1029 | ccp_prop_value_t tem = *val2; |
b7e55469 | 1030 | if (TREE_CODE (val1->value) == ADDR_EXPR) |
1031 | *val1 = get_value_for_expr (val1->value, true); | |
1032 | if (TREE_CODE (val2->value) == ADDR_EXPR) | |
1033 | tem = get_value_for_expr (val2->value, true); | |
27de3d43 | 1034 | ccp_lattice_meet (val1, &tem); |
41511585 | 1035 | } |
1036 | else | |
1037 | { | |
88dbf20f | 1038 | /* Any other combination is VARYING. */ |
1039 | val1->lattice_val = VARYING; | |
e913b5cd | 1040 | val1->mask = -1; |
88dbf20f | 1041 | val1->value = NULL_TREE; |
41511585 | 1042 | } |
4ee9c684 | 1043 | } |
1044 | ||
1045 | ||
41511585 | 1046 | /* Loop through the PHI_NODE's parameters for BLOCK and compare their |
1047 | lattice values to determine PHI_NODE's lattice value. The value of a | |
88dbf20f | 1048 | PHI node is determined calling ccp_lattice_meet with all the arguments |
41511585 | 1049 | of the PHI node that are incoming via executable edges. */ |
4ee9c684 | 1050 | |
41511585 | 1051 | static enum ssa_prop_result |
1a91d914 | 1052 | ccp_visit_phi_node (gphi *phi) |
4ee9c684 | 1053 | { |
75a70cf9 | 1054 | unsigned i; |
bc1d3d97 | 1055 | ccp_prop_value_t new_val; |
4ee9c684 | 1056 | |
41511585 | 1057 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4ee9c684 | 1058 | { |
41511585 | 1059 | fprintf (dump_file, "\nVisiting PHI node: "); |
75a70cf9 | 1060 | print_gimple_stmt (dump_file, phi, 0, dump_flags); |
4ee9c684 | 1061 | } |
4ee9c684 | 1062 | |
bc1d3d97 | 1063 | new_val.lattice_val = UNDEFINED; |
1064 | new_val.value = NULL_TREE; | |
1065 | new_val.mask = 0; | |
4ee9c684 | 1066 | |
bc1d3d97 | 1067 | bool first = true; |
83c60000 | 1068 | bool non_exec_edge = false; |
75a70cf9 | 1069 | for (i = 0; i < gimple_phi_num_args (phi); i++) |
41511585 | 1070 | { |
88dbf20f | 1071 | /* Compute the meet operator over all the PHI arguments flowing |
1072 | through executable edges. */ | |
75a70cf9 | 1073 | edge e = gimple_phi_arg_edge (phi, i); |
4ee9c684 | 1074 | |
41511585 | 1075 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1076 | { | |
1077 | fprintf (dump_file, | |
1078 | "\n Argument #%d (%d -> %d %sexecutable)\n", | |
1079 | i, e->src->index, e->dest->index, | |
1080 | (e->flags & EDGE_EXECUTABLE) ? "" : "not "); | |
1081 | } | |
1082 | ||
1083 | /* If the incoming edge is executable, Compute the meet operator for | |
1084 | the existing value of the PHI node and the current PHI argument. */ | |
1085 | if (e->flags & EDGE_EXECUTABLE) | |
1086 | { | |
75a70cf9 | 1087 | tree arg = gimple_phi_arg (phi, i)->def; |
9908fe4d | 1088 | ccp_prop_value_t arg_val = get_value_for_expr (arg, false); |
4ee9c684 | 1089 | |
bc1d3d97 | 1090 | if (first) |
1091 | { | |
1092 | new_val = arg_val; | |
1093 | first = false; | |
1094 | } | |
1095 | else | |
27de3d43 | 1096 | ccp_lattice_meet (&new_val, &arg_val); |
4ee9c684 | 1097 | |
41511585 | 1098 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1099 | { | |
1100 | fprintf (dump_file, "\t"); | |
88dbf20f | 1101 | print_generic_expr (dump_file, arg, dump_flags); |
1102 | dump_lattice_value (dump_file, "\tValue: ", arg_val); | |
41511585 | 1103 | fprintf (dump_file, "\n"); |
1104 | } | |
4ee9c684 | 1105 | |
41511585 | 1106 | if (new_val.lattice_val == VARYING) |
1107 | break; | |
1108 | } | |
83c60000 | 1109 | else |
1110 | non_exec_edge = true; | |
1111 | } | |
1112 | ||
1113 | /* In case there were non-executable edges and the value is a copy | |
1114 | make sure its definition dominates the PHI node. */ | |
1115 | if (non_exec_edge | |
1116 | && new_val.lattice_val == CONSTANT | |
1117 | && TREE_CODE (new_val.value) == SSA_NAME | |
1118 | && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value) | |
1119 | && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi), | |
1120 | gimple_bb (SSA_NAME_DEF_STMT (new_val.value)))) | |
1121 | { | |
1122 | new_val.lattice_val = VARYING; | |
1123 | new_val.value = NULL_TREE; | |
1124 | new_val.mask = -1; | |
41511585 | 1125 | } |
4ee9c684 | 1126 | |
1127 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
41511585 | 1128 | { |
1129 | dump_lattice_value (dump_file, "\n PHI node value: ", new_val); | |
1130 | fprintf (dump_file, "\n\n"); | |
1131 | } | |
1132 | ||
bfa30570 | 1133 | /* Make the transition to the new value. */ |
27de3d43 | 1134 | if (set_lattice_value (gimple_phi_result (phi), &new_val)) |
41511585 | 1135 | { |
1136 | if (new_val.lattice_val == VARYING) | |
1137 | return SSA_PROP_VARYING; | |
1138 | else | |
1139 | return SSA_PROP_INTERESTING; | |
1140 | } | |
1141 | else | |
1142 | return SSA_PROP_NOT_INTERESTING; | |
4ee9c684 | 1143 | } |
1144 | ||
15d138c9 | 1145 | /* Return the constant value for OP or OP otherwise. */ |
00f4f705 | 1146 | |
1147 | static tree | |
15d138c9 | 1148 | valueize_op (tree op) |
00f4f705 | 1149 | { |
00f4f705 | 1150 | if (TREE_CODE (op) == SSA_NAME) |
1151 | { | |
15d138c9 | 1152 | tree tem = get_constant_value (op); |
1153 | if (tem) | |
1154 | return tem; | |
00f4f705 | 1155 | } |
1156 | return op; | |
1157 | } | |
1158 | ||
4e1b3545 | 1159 | /* Return the constant value for OP, but signal to not follow SSA |
1160 | edges if the definition may be simulated again. */ | |
1161 | ||
1162 | static tree | |
1163 | valueize_op_1 (tree op) | |
1164 | { | |
1165 | if (TREE_CODE (op) == SSA_NAME) | |
1166 | { | |
4e1b3545 | 1167 | /* If the definition may be simulated again we cannot follow |
1168 | this SSA edge as the SSA propagator does not necessarily | |
1169 | re-visit the use. */ | |
42acab1c | 1170 | gimple *def_stmt = SSA_NAME_DEF_STMT (op); |
2160d5ba | 1171 | if (!gimple_nop_p (def_stmt) |
1172 | && prop_simulate_again_p (def_stmt)) | |
4e1b3545 | 1173 | return NULL_TREE; |
bc8fa068 | 1174 | tree tem = get_constant_value (op); |
1175 | if (tem) | |
1176 | return tem; | |
4e1b3545 | 1177 | } |
1178 | return op; | |
1179 | } | |
1180 | ||
41511585 | 1181 | /* CCP specific front-end to the non-destructive constant folding |
1182 | routines. | |
4ee9c684 | 1183 | |
1184 | Attempt to simplify the RHS of STMT knowing that one or more | |
1185 | operands are constants. | |
1186 | ||
1187 | If simplification is possible, return the simplified RHS, | |
75a70cf9 | 1188 | otherwise return the original RHS or NULL_TREE. */ |
4ee9c684 | 1189 | |
1190 | static tree | |
42acab1c | 1191 | ccp_fold (gimple *stmt) |
4ee9c684 | 1192 | { |
389dd41b | 1193 | location_t loc = gimple_location (stmt); |
75a70cf9 | 1194 | switch (gimple_code (stmt)) |
88dbf20f | 1195 | { |
75a70cf9 | 1196 | case GIMPLE_COND: |
1197 | { | |
1198 | /* Handle comparison operators that can appear in GIMPLE form. */ | |
15d138c9 | 1199 | tree op0 = valueize_op (gimple_cond_lhs (stmt)); |
1200 | tree op1 = valueize_op (gimple_cond_rhs (stmt)); | |
75a70cf9 | 1201 | enum tree_code code = gimple_cond_code (stmt); |
389dd41b | 1202 | return fold_binary_loc (loc, code, boolean_type_node, op0, op1); |
75a70cf9 | 1203 | } |
4ee9c684 | 1204 | |
75a70cf9 | 1205 | case GIMPLE_SWITCH: |
1206 | { | |
15d138c9 | 1207 | /* Return the constant switch index. */ |
1a91d914 | 1208 | return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt))); |
75a70cf9 | 1209 | } |
912f109f | 1210 | |
1d0b727d | 1211 | case GIMPLE_ASSIGN: |
1212 | case GIMPLE_CALL: | |
4e1b3545 | 1213 | return gimple_fold_stmt_to_constant_1 (stmt, |
1214 | valueize_op, valueize_op_1); | |
04236c3a | 1215 | |
8782adcf | 1216 | default: |
1d0b727d | 1217 | gcc_unreachable (); |
8782adcf | 1218 | } |
8782adcf | 1219 | } |
75a70cf9 | 1220 | |
b7e55469 | 1221 | /* Apply the operation CODE in type TYPE to the value, mask pair |
1222 | RVAL and RMASK representing a value of type RTYPE and set | |
1223 | the value, mask pair *VAL and *MASK to the result. */ | |
1224 | ||
1225 | static void | |
1226 | bit_value_unop_1 (enum tree_code code, tree type, | |
5de9d3ed | 1227 | widest_int *val, widest_int *mask, |
1228 | tree rtype, const widest_int &rval, const widest_int &rmask) | |
b7e55469 | 1229 | { |
1230 | switch (code) | |
1231 | { | |
1232 | case BIT_NOT_EXPR: | |
1233 | *mask = rmask; | |
cf8f0e63 | 1234 | *val = ~rval; |
b7e55469 | 1235 | break; |
1236 | ||
1237 | case NEGATE_EXPR: | |
1238 | { | |
5de9d3ed | 1239 | widest_int temv, temm; |
b7e55469 | 1240 | /* Return ~rval + 1. */ |
1241 | bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask); | |
1242 | bit_value_binop_1 (PLUS_EXPR, type, val, mask, | |
e913b5cd | 1243 | type, temv, temm, type, 1, 0); |
b7e55469 | 1244 | break; |
1245 | } | |
1246 | ||
1247 | CASE_CONVERT: | |
1248 | { | |
e913b5cd | 1249 | signop sgn; |
b7e55469 | 1250 | |
1251 | /* First extend mask and value according to the original type. */ | |
e913b5cd | 1252 | sgn = TYPE_SIGN (rtype); |
796b6678 | 1253 | *mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn); |
1254 | *val = wi::ext (rval, TYPE_PRECISION (rtype), sgn); | |
b7e55469 | 1255 | |
1256 | /* Then extend mask and value according to the target type. */ | |
e913b5cd | 1257 | sgn = TYPE_SIGN (type); |
796b6678 | 1258 | *mask = wi::ext (*mask, TYPE_PRECISION (type), sgn); |
1259 | *val = wi::ext (*val, TYPE_PRECISION (type), sgn); | |
b7e55469 | 1260 | break; |
1261 | } | |
1262 | ||
1263 | default: | |
e913b5cd | 1264 | *mask = -1; |
b7e55469 | 1265 | break; |
1266 | } | |
1267 | } | |
1268 | ||
1269 | /* Apply the operation CODE in type TYPE to the value, mask pairs | |
1270 | R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE | |
1271 | and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */ | |
1272 | ||
1273 | static void | |
1274 | bit_value_binop_1 (enum tree_code code, tree type, | |
5de9d3ed | 1275 | widest_int *val, widest_int *mask, |
10c3fe8d | 1276 | tree r1type, const widest_int &r1val, |
1277 | const widest_int &r1mask, tree r2type, | |
1278 | const widest_int &r2val, const widest_int &r2mask) | |
b7e55469 | 1279 | { |
e913b5cd | 1280 | signop sgn = TYPE_SIGN (type); |
1281 | int width = TYPE_PRECISION (type); | |
10c3fe8d | 1282 | bool swap_p = false; |
e913b5cd | 1283 | |
1284 | /* Assume we'll get a constant result. Use an initial non varying | |
1285 | value, we fall back to varying in the end if necessary. */ | |
1286 | *mask = -1; | |
1287 | ||
b7e55469 | 1288 | switch (code) |
1289 | { | |
1290 | case BIT_AND_EXPR: | |
1291 | /* The mask is constant where there is a known not | |
1292 | set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */ | |
cf8f0e63 | 1293 | *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask); |
1294 | *val = r1val & r2val; | |
b7e55469 | 1295 | break; |
1296 | ||
1297 | case BIT_IOR_EXPR: | |
1298 | /* The mask is constant where there is a known | |
1299 | set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */ | |
cf8f0e63 | 1300 | *mask = (r1mask | r2mask) |
1301 | .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask)); | |
1302 | *val = r1val | r2val; | |
b7e55469 | 1303 | break; |
1304 | ||
1305 | case BIT_XOR_EXPR: | |
1306 | /* m1 | m2 */ | |
cf8f0e63 | 1307 | *mask = r1mask | r2mask; |
1308 | *val = r1val ^ r2val; | |
b7e55469 | 1309 | break; |
1310 | ||
1311 | case LROTATE_EXPR: | |
1312 | case RROTATE_EXPR: | |
796b6678 | 1313 | if (r2mask == 0) |
b7e55469 | 1314 | { |
28e557ef | 1315 | widest_int shift = r2val; |
796b6678 | 1316 | if (shift == 0) |
e913b5cd | 1317 | { |
1318 | *mask = r1mask; | |
1319 | *val = r1val; | |
1320 | } | |
ddb1be65 | 1321 | else |
e913b5cd | 1322 | { |
796b6678 | 1323 | if (wi::neg_p (shift)) |
e913b5cd | 1324 | { |
1325 | shift = -shift; | |
1326 | if (code == RROTATE_EXPR) | |
1327 | code = LROTATE_EXPR; | |
1328 | else | |
1329 | code = RROTATE_EXPR; | |
1330 | } | |
1331 | if (code == RROTATE_EXPR) | |
1332 | { | |
796b6678 | 1333 | *mask = wi::rrotate (r1mask, shift, width); |
1334 | *val = wi::rrotate (r1val, shift, width); | |
e913b5cd | 1335 | } |
1336 | else | |
1337 | { | |
796b6678 | 1338 | *mask = wi::lrotate (r1mask, shift, width); |
1339 | *val = wi::lrotate (r1val, shift, width); | |
e913b5cd | 1340 | } |
1341 | } | |
b7e55469 | 1342 | } |
1343 | break; | |
1344 | ||
1345 | case LSHIFT_EXPR: | |
1346 | case RSHIFT_EXPR: | |
1347 | /* ??? We can handle partially known shift counts if we know | |
1348 | its sign. That way we can tell that (x << (y | 8)) & 255 | |
1349 | is zero. */ | |
796b6678 | 1350 | if (r2mask == 0) |
b7e55469 | 1351 | { |
28e557ef | 1352 | widest_int shift = r2val; |
796b6678 | 1353 | if (shift == 0) |
b7e55469 | 1354 | { |
1355 | *mask = r1mask; | |
1356 | *val = r1val; | |
1357 | } | |
ddb1be65 | 1358 | else |
e913b5cd | 1359 | { |
796b6678 | 1360 | if (wi::neg_p (shift)) |
e913b5cd | 1361 | { |
1362 | shift = -shift; | |
1363 | if (code == RSHIFT_EXPR) | |
1364 | code = LSHIFT_EXPR; | |
1365 | else | |
1366 | code = RSHIFT_EXPR; | |
1367 | } | |
1368 | if (code == RSHIFT_EXPR) | |
1369 | { | |
45639915 | 1370 | *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn); |
1371 | *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn); | |
e913b5cd | 1372 | } |
1373 | else | |
1374 | { | |
45639915 | 1375 | *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn); |
1376 | *val = wi::ext (wi::lshift (r1val, shift), width, sgn); | |
e913b5cd | 1377 | } |
1378 | } | |
b7e55469 | 1379 | } |
1380 | break; | |
1381 | ||
1382 | case PLUS_EXPR: | |
1383 | case POINTER_PLUS_EXPR: | |
1384 | { | |
b7e55469 | 1385 | /* Do the addition with unknown bits set to zero, to give carry-ins of |
1386 | zero wherever possible. */ | |
ab2c1de8 | 1387 | widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask); |
796b6678 | 1388 | lo = wi::ext (lo, width, sgn); |
b7e55469 | 1389 | /* Do the addition with unknown bits set to one, to give carry-ins of |
1390 | one wherever possible. */ | |
ab2c1de8 | 1391 | widest_int hi = (r1val | r1mask) + (r2val | r2mask); |
796b6678 | 1392 | hi = wi::ext (hi, width, sgn); |
b7e55469 | 1393 | /* Each bit in the result is known if (a) the corresponding bits in |
1394 | both inputs are known, and (b) the carry-in to that bit position | |
1395 | is known. We can check condition (b) by seeing if we got the same | |
1396 | result with minimised carries as with maximised carries. */ | |
cf8f0e63 | 1397 | *mask = r1mask | r2mask | (lo ^ hi); |
796b6678 | 1398 | *mask = wi::ext (*mask, width, sgn); |
b7e55469 | 1399 | /* It shouldn't matter whether we choose lo or hi here. */ |
1400 | *val = lo; | |
1401 | break; | |
1402 | } | |
1403 | ||
1404 | case MINUS_EXPR: | |
1405 | { | |
5de9d3ed | 1406 | widest_int temv, temm; |
b7e55469 | 1407 | bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm, |
1408 | r2type, r2val, r2mask); | |
1409 | bit_value_binop_1 (PLUS_EXPR, type, val, mask, | |
1410 | r1type, r1val, r1mask, | |
1411 | r2type, temv, temm); | |
1412 | break; | |
1413 | } | |
1414 | ||
1415 | case MULT_EXPR: | |
1416 | { | |
1417 | /* Just track trailing zeros in both operands and transfer | |
1418 | them to the other. */ | |
796b6678 | 1419 | int r1tz = wi::ctz (r1val | r1mask); |
1420 | int r2tz = wi::ctz (r2val | r2mask); | |
e913b5cd | 1421 | if (r1tz + r2tz >= width) |
b7e55469 | 1422 | { |
e913b5cd | 1423 | *mask = 0; |
1424 | *val = 0; | |
b7e55469 | 1425 | } |
1426 | else if (r1tz + r2tz > 0) | |
1427 | { | |
5de9d3ed | 1428 | *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true), |
796b6678 | 1429 | width, sgn); |
e913b5cd | 1430 | *val = 0; |
b7e55469 | 1431 | } |
1432 | break; | |
1433 | } | |
1434 | ||
1435 | case EQ_EXPR: | |
1436 | case NE_EXPR: | |
1437 | { | |
5de9d3ed | 1438 | widest_int m = r1mask | r2mask; |
cf8f0e63 | 1439 | if (r1val.and_not (m) != r2val.and_not (m)) |
b7e55469 | 1440 | { |
e913b5cd | 1441 | *mask = 0; |
1442 | *val = ((code == EQ_EXPR) ? 0 : 1); | |
b7e55469 | 1443 | } |
1444 | else | |
1445 | { | |
1446 | /* We know the result of a comparison is always one or zero. */ | |
e913b5cd | 1447 | *mask = 1; |
1448 | *val = 0; | |
b7e55469 | 1449 | } |
1450 | break; | |
1451 | } | |
1452 | ||
1453 | case GE_EXPR: | |
1454 | case GT_EXPR: | |
10c3fe8d | 1455 | swap_p = true; |
1456 | code = swap_tree_comparison (code); | |
1457 | /* Fall through. */ | |
b7e55469 | 1458 | case LT_EXPR: |
1459 | case LE_EXPR: | |
1460 | { | |
1461 | int minmax, maxmin; | |
e913b5cd | 1462 | |
10c3fe8d | 1463 | const widest_int &o1val = swap_p ? r2val : r1val; |
1464 | const widest_int &o1mask = swap_p ? r2mask : r1mask; | |
1465 | const widest_int &o2val = swap_p ? r1val : r2val; | |
1466 | const widest_int &o2mask = swap_p ? r1mask : r2mask; | |
1467 | ||
b7e55469 | 1468 | /* If the most significant bits are not known we know nothing. */ |
796b6678 | 1469 | if (wi::neg_p (o1mask) || wi::neg_p (o2mask)) |
b7e55469 | 1470 | break; |
1471 | ||
90c0f5b7 | 1472 | /* For comparisons the signedness is in the comparison operands. */ |
e913b5cd | 1473 | sgn = TYPE_SIGN (r1type); |
90c0f5b7 | 1474 | |
b7e55469 | 1475 | /* If we know the most significant bits we know the values |
1476 | value ranges by means of treating varying bits as zero | |
1477 | or one. Do a cross comparison of the max/min pairs. */ | |
796b6678 | 1478 | maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn); |
1479 | minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn); | |
e913b5cd | 1480 | if (maxmin < 0) /* o1 is less than o2. */ |
b7e55469 | 1481 | { |
e913b5cd | 1482 | *mask = 0; |
1483 | *val = 1; | |
b7e55469 | 1484 | } |
e913b5cd | 1485 | else if (minmax > 0) /* o1 is not less or equal to o2. */ |
b7e55469 | 1486 | { |
e913b5cd | 1487 | *mask = 0; |
1488 | *val = 0; | |
b7e55469 | 1489 | } |
e913b5cd | 1490 | else if (maxmin == minmax) /* o1 and o2 are equal. */ |
b7e55469 | 1491 | { |
1492 | /* This probably should never happen as we'd have | |
1493 | folded the thing during fully constant value folding. */ | |
e913b5cd | 1494 | *mask = 0; |
1495 | *val = (code == LE_EXPR ? 1 : 0); | |
b7e55469 | 1496 | } |
1497 | else | |
1498 | { | |
1499 | /* We know the result of a comparison is always one or zero. */ | |
e913b5cd | 1500 | *mask = 1; |
1501 | *val = 0; | |
b7e55469 | 1502 | } |
1503 | break; | |
1504 | } | |
1505 | ||
1506 | default:; | |
1507 | } | |
1508 | } | |
1509 | ||
1510 | /* Return the propagation value when applying the operation CODE to | |
1511 | the value RHS yielding type TYPE. */ | |
1512 | ||
9908fe4d | 1513 | static ccp_prop_value_t |
b7e55469 | 1514 | bit_value_unop (enum tree_code code, tree type, tree rhs) |
1515 | { | |
9908fe4d | 1516 | ccp_prop_value_t rval = get_value_for_expr (rhs, true); |
5de9d3ed | 1517 | widest_int value, mask; |
9908fe4d | 1518 | ccp_prop_value_t val; |
c91fedc5 | 1519 | |
1520 | if (rval.lattice_val == UNDEFINED) | |
1521 | return rval; | |
1522 | ||
b7e55469 | 1523 | gcc_assert ((rval.lattice_val == CONSTANT |
1524 | && TREE_CODE (rval.value) == INTEGER_CST) | |
c90d2c17 | 1525 | || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1); |
b7e55469 | 1526 | bit_value_unop_1 (code, type, &value, &mask, |
e913b5cd | 1527 | TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask); |
c90d2c17 | 1528 | if (wi::sext (mask, TYPE_PRECISION (type)) != -1) |
b7e55469 | 1529 | { |
1530 | val.lattice_val = CONSTANT; | |
1531 | val.mask = mask; | |
1532 | /* ??? Delay building trees here. */ | |
e913b5cd | 1533 | val.value = wide_int_to_tree (type, value); |
b7e55469 | 1534 | } |
1535 | else | |
1536 | { | |
1537 | val.lattice_val = VARYING; | |
1538 | val.value = NULL_TREE; | |
e913b5cd | 1539 | val.mask = -1; |
b7e55469 | 1540 | } |
1541 | return val; | |
1542 | } | |
1543 | ||
1544 | /* Return the propagation value when applying the operation CODE to | |
1545 | the values RHS1 and RHS2 yielding type TYPE. */ | |
1546 | ||
9908fe4d | 1547 | static ccp_prop_value_t |
b7e55469 | 1548 | bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2) |
1549 | { | |
9908fe4d | 1550 | ccp_prop_value_t r1val = get_value_for_expr (rhs1, true); |
1551 | ccp_prop_value_t r2val = get_value_for_expr (rhs2, true); | |
5de9d3ed | 1552 | widest_int value, mask; |
9908fe4d | 1553 | ccp_prop_value_t val; |
c91fedc5 | 1554 | |
1555 | if (r1val.lattice_val == UNDEFINED | |
1556 | || r2val.lattice_val == UNDEFINED) | |
1557 | { | |
1558 | val.lattice_val = VARYING; | |
1559 | val.value = NULL_TREE; | |
e913b5cd | 1560 | val.mask = -1; |
c91fedc5 | 1561 | return val; |
1562 | } | |
1563 | ||
b7e55469 | 1564 | gcc_assert ((r1val.lattice_val == CONSTANT |
1565 | && TREE_CODE (r1val.value) == INTEGER_CST) | |
c90d2c17 | 1566 | || wi::sext (r1val.mask, |
1567 | TYPE_PRECISION (TREE_TYPE (rhs1))) == -1); | |
b7e55469 | 1568 | gcc_assert ((r2val.lattice_val == CONSTANT |
1569 | && TREE_CODE (r2val.value) == INTEGER_CST) | |
c90d2c17 | 1570 | || wi::sext (r2val.mask, |
1571 | TYPE_PRECISION (TREE_TYPE (rhs2))) == -1); | |
b7e55469 | 1572 | bit_value_binop_1 (code, type, &value, &mask, |
e913b5cd | 1573 | TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask, |
1574 | TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask); | |
c90d2c17 | 1575 | if (wi::sext (mask, TYPE_PRECISION (type)) != -1) |
b7e55469 | 1576 | { |
1577 | val.lattice_val = CONSTANT; | |
1578 | val.mask = mask; | |
1579 | /* ??? Delay building trees here. */ | |
e913b5cd | 1580 | val.value = wide_int_to_tree (type, value); |
b7e55469 | 1581 | } |
1582 | else | |
1583 | { | |
1584 | val.lattice_val = VARYING; | |
1585 | val.value = NULL_TREE; | |
e913b5cd | 1586 | val.mask = -1; |
b7e55469 | 1587 | } |
1588 | return val; | |
1589 | } | |
1590 | ||
237e78b1 | 1591 | /* Return the propagation value for __builtin_assume_aligned |
1592 | and functions with assume_aligned or alloc_aligned attribute. | |
1593 | For __builtin_assume_aligned, ATTR is NULL_TREE, | |
1594 | for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED | |
1595 | is false, for alloc_aligned attribute ATTR is non-NULL and | |
1596 | ALLOC_ALIGNED is true. */ | |
fca0886c | 1597 | |
9908fe4d | 1598 | static ccp_prop_value_t |
42acab1c | 1599 | bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval, |
237e78b1 | 1600 | bool alloc_aligned) |
fca0886c | 1601 | { |
237e78b1 | 1602 | tree align, misalign = NULL_TREE, type; |
fca0886c | 1603 | unsigned HOST_WIDE_INT aligni, misaligni = 0; |
9908fe4d | 1604 | ccp_prop_value_t alignval; |
5de9d3ed | 1605 | widest_int value, mask; |
9908fe4d | 1606 | ccp_prop_value_t val; |
e913b5cd | 1607 | |
237e78b1 | 1608 | if (attr == NULL_TREE) |
1609 | { | |
1610 | tree ptr = gimple_call_arg (stmt, 0); | |
1611 | type = TREE_TYPE (ptr); | |
1612 | ptrval = get_value_for_expr (ptr, true); | |
1613 | } | |
1614 | else | |
1615 | { | |
1616 | tree lhs = gimple_call_lhs (stmt); | |
1617 | type = TREE_TYPE (lhs); | |
1618 | } | |
1619 | ||
fca0886c | 1620 | if (ptrval.lattice_val == UNDEFINED) |
1621 | return ptrval; | |
1622 | gcc_assert ((ptrval.lattice_val == CONSTANT | |
1623 | && TREE_CODE (ptrval.value) == INTEGER_CST) | |
c90d2c17 | 1624 | || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1); |
237e78b1 | 1625 | if (attr == NULL_TREE) |
fca0886c | 1626 | { |
237e78b1 | 1627 | /* Get aligni and misaligni from __builtin_assume_aligned. */ |
1628 | align = gimple_call_arg (stmt, 1); | |
1629 | if (!tree_fits_uhwi_p (align)) | |
fca0886c | 1630 | return ptrval; |
237e78b1 | 1631 | aligni = tree_to_uhwi (align); |
1632 | if (gimple_call_num_args (stmt) > 2) | |
1633 | { | |
1634 | misalign = gimple_call_arg (stmt, 2); | |
1635 | if (!tree_fits_uhwi_p (misalign)) | |
1636 | return ptrval; | |
1637 | misaligni = tree_to_uhwi (misalign); | |
1638 | } | |
1639 | } | |
1640 | else | |
fca0886c | 1641 | { |
237e78b1 | 1642 | /* Get aligni and misaligni from assume_aligned or |
1643 | alloc_align attributes. */ | |
1644 | if (TREE_VALUE (attr) == NULL_TREE) | |
fca0886c | 1645 | return ptrval; |
237e78b1 | 1646 | attr = TREE_VALUE (attr); |
1647 | align = TREE_VALUE (attr); | |
1648 | if (!tree_fits_uhwi_p (align)) | |
fca0886c | 1649 | return ptrval; |
237e78b1 | 1650 | aligni = tree_to_uhwi (align); |
1651 | if (alloc_aligned) | |
1652 | { | |
1653 | if (aligni == 0 || aligni > gimple_call_num_args (stmt)) | |
1654 | return ptrval; | |
1655 | align = gimple_call_arg (stmt, aligni - 1); | |
1656 | if (!tree_fits_uhwi_p (align)) | |
1657 | return ptrval; | |
1658 | aligni = tree_to_uhwi (align); | |
1659 | } | |
1660 | else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr))) | |
1661 | { | |
1662 | misalign = TREE_VALUE (TREE_CHAIN (attr)); | |
1663 | if (!tree_fits_uhwi_p (misalign)) | |
1664 | return ptrval; | |
1665 | misaligni = tree_to_uhwi (misalign); | |
1666 | } | |
fca0886c | 1667 | } |
237e78b1 | 1668 | if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni) |
1669 | return ptrval; | |
1670 | ||
fca0886c | 1671 | align = build_int_cst_type (type, -aligni); |
1672 | alignval = get_value_for_expr (align, true); | |
1673 | bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask, | |
e913b5cd | 1674 | type, value_to_wide_int (ptrval), ptrval.mask, |
1675 | type, value_to_wide_int (alignval), alignval.mask); | |
c90d2c17 | 1676 | if (wi::sext (mask, TYPE_PRECISION (type)) != -1) |
fca0886c | 1677 | { |
1678 | val.lattice_val = CONSTANT; | |
1679 | val.mask = mask; | |
e913b5cd | 1680 | gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0); |
1681 | gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0); | |
1682 | value |= misaligni; | |
fca0886c | 1683 | /* ??? Delay building trees here. */ |
e913b5cd | 1684 | val.value = wide_int_to_tree (type, value); |
fca0886c | 1685 | } |
1686 | else | |
1687 | { | |
1688 | val.lattice_val = VARYING; | |
1689 | val.value = NULL_TREE; | |
e913b5cd | 1690 | val.mask = -1; |
fca0886c | 1691 | } |
1692 | return val; | |
1693 | } | |
1694 | ||
75a70cf9 | 1695 | /* Evaluate statement STMT. |
1696 | Valid only for assignments, calls, conditionals, and switches. */ | |
4ee9c684 | 1697 | |
9908fe4d | 1698 | static ccp_prop_value_t |
42acab1c | 1699 | evaluate_stmt (gimple *stmt) |
4ee9c684 | 1700 | { |
9908fe4d | 1701 | ccp_prop_value_t val; |
4f61cce6 | 1702 | tree simplified = NULL_TREE; |
88dbf20f | 1703 | ccp_lattice_t likelyvalue = likely_value (stmt); |
b7e55469 | 1704 | bool is_constant = false; |
581bf1c2 | 1705 | unsigned int align; |
88dbf20f | 1706 | |
b7e55469 | 1707 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1708 | { | |
1709 | fprintf (dump_file, "which is likely "); | |
1710 | switch (likelyvalue) | |
1711 | { | |
1712 | case CONSTANT: | |
1713 | fprintf (dump_file, "CONSTANT"); | |
1714 | break; | |
1715 | case UNDEFINED: | |
1716 | fprintf (dump_file, "UNDEFINED"); | |
1717 | break; | |
1718 | case VARYING: | |
1719 | fprintf (dump_file, "VARYING"); | |
1720 | break; | |
1721 | default:; | |
1722 | } | |
1723 | fprintf (dump_file, "\n"); | |
1724 | } | |
add6ee5e | 1725 | |
4ee9c684 | 1726 | /* If the statement is likely to have a CONSTANT result, then try |
1727 | to fold the statement to determine the constant value. */ | |
75a70cf9 | 1728 | /* FIXME. This is the only place that we call ccp_fold. |
1729 | Since likely_value never returns CONSTANT for calls, we will | |
1730 | not attempt to fold them, including builtins that may profit. */ | |
4ee9c684 | 1731 | if (likelyvalue == CONSTANT) |
b7e55469 | 1732 | { |
1733 | fold_defer_overflow_warnings (); | |
1734 | simplified = ccp_fold (stmt); | |
27de3d43 | 1735 | if (simplified && TREE_CODE (simplified) == SSA_NAME) |
1736 | { | |
1737 | val = *get_value (simplified); | |
1738 | if (val.lattice_val != VARYING) | |
1739 | { | |
1740 | fold_undefer_overflow_warnings (true, stmt, 0); | |
1741 | return val; | |
1742 | } | |
1743 | } | |
b7e55469 | 1744 | is_constant = simplified && is_gimple_min_invariant (simplified); |
1745 | fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
1746 | if (is_constant) | |
1747 | { | |
1748 | /* The statement produced a constant value. */ | |
1749 | val.lattice_val = CONSTANT; | |
1750 | val.value = simplified; | |
e913b5cd | 1751 | val.mask = 0; |
27de3d43 | 1752 | return val; |
b7e55469 | 1753 | } |
1754 | } | |
4ee9c684 | 1755 | /* If the statement is likely to have a VARYING result, then do not |
1756 | bother folding the statement. */ | |
04236c3a | 1757 | else if (likelyvalue == VARYING) |
75a70cf9 | 1758 | { |
590c3166 | 1759 | enum gimple_code code = gimple_code (stmt); |
75a70cf9 | 1760 | if (code == GIMPLE_ASSIGN) |
1761 | { | |
1762 | enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
48e1416a | 1763 | |
75a70cf9 | 1764 | /* Other cases cannot satisfy is_gimple_min_invariant |
1765 | without folding. */ | |
1766 | if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) | |
1767 | simplified = gimple_assign_rhs1 (stmt); | |
1768 | } | |
1769 | else if (code == GIMPLE_SWITCH) | |
1a91d914 | 1770 | simplified = gimple_switch_index (as_a <gswitch *> (stmt)); |
75a70cf9 | 1771 | else |
a65c4d64 | 1772 | /* These cannot satisfy is_gimple_min_invariant without folding. */ |
1773 | gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); | |
b7e55469 | 1774 | is_constant = simplified && is_gimple_min_invariant (simplified); |
1775 | if (is_constant) | |
1776 | { | |
1777 | /* The statement produced a constant value. */ | |
1778 | val.lattice_val = CONSTANT; | |
1779 | val.value = simplified; | |
e913b5cd | 1780 | val.mask = 0; |
b7e55469 | 1781 | } |
75a70cf9 | 1782 | } |
4c9206f2 | 1783 | /* If the statement result is likely UNDEFINED, make it so. */ |
1784 | else if (likelyvalue == UNDEFINED) | |
1785 | { | |
1786 | val.lattice_val = UNDEFINED; | |
1787 | val.value = NULL_TREE; | |
1788 | val.mask = 0; | |
1789 | return val; | |
1790 | } | |
4ee9c684 | 1791 | |
b7e55469 | 1792 | /* Resort to simplification for bitwise tracking. */ |
1793 | if (flag_tree_bit_ccp | |
db48deb0 | 1794 | && (likelyvalue == CONSTANT || is_gimple_call (stmt) |
1795 | || (gimple_assign_single_p (stmt) | |
1796 | && gimple_assign_rhs_code (stmt) == ADDR_EXPR)) | |
b7e55469 | 1797 | && !is_constant) |
912f109f | 1798 | { |
b7e55469 | 1799 | enum gimple_code code = gimple_code (stmt); |
1800 | val.lattice_val = VARYING; | |
1801 | val.value = NULL_TREE; | |
e913b5cd | 1802 | val.mask = -1; |
b7e55469 | 1803 | if (code == GIMPLE_ASSIGN) |
912f109f | 1804 | { |
b7e55469 | 1805 | enum tree_code subcode = gimple_assign_rhs_code (stmt); |
1806 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
c0a4b664 | 1807 | tree lhs = gimple_assign_lhs (stmt); |
1808 | if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs)) | |
1809 | || POINTER_TYPE_P (TREE_TYPE (lhs))) | |
1810 | && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1811 | || POINTER_TYPE_P (TREE_TYPE (rhs1)))) | |
1812 | switch (get_gimple_rhs_class (subcode)) | |
1813 | { | |
1814 | case GIMPLE_SINGLE_RHS: | |
1815 | val = get_value_for_expr (rhs1, true); | |
1816 | break; | |
1817 | ||
1818 | case GIMPLE_UNARY_RHS: | |
1819 | val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1); | |
1820 | break; | |
1821 | ||
1822 | case GIMPLE_BINARY_RHS: | |
1823 | val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1, | |
1824 | gimple_assign_rhs2 (stmt)); | |
1825 | break; | |
1826 | ||
1827 | default:; | |
1828 | } | |
912f109f | 1829 | } |
b7e55469 | 1830 | else if (code == GIMPLE_COND) |
1831 | { | |
1832 | enum tree_code code = gimple_cond_code (stmt); | |
1833 | tree rhs1 = gimple_cond_lhs (stmt); | |
1834 | tree rhs2 = gimple_cond_rhs (stmt); | |
1835 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1836 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1837 | val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2); | |
1838 | } | |
0b4f0116 | 1839 | else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) |
153c3b50 | 1840 | { |
0b4f0116 | 1841 | tree fndecl = gimple_call_fndecl (stmt); |
153c3b50 | 1842 | switch (DECL_FUNCTION_CODE (fndecl)) |
1843 | { | |
1844 | case BUILT_IN_MALLOC: | |
1845 | case BUILT_IN_REALLOC: | |
1846 | case BUILT_IN_CALLOC: | |
939514e9 | 1847 | case BUILT_IN_STRDUP: |
1848 | case BUILT_IN_STRNDUP: | |
153c3b50 | 1849 | val.lattice_val = CONSTANT; |
1850 | val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
796b6678 | 1851 | val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT |
1852 | / BITS_PER_UNIT - 1); | |
153c3b50 | 1853 | break; |
1854 | ||
1855 | case BUILT_IN_ALLOCA: | |
581bf1c2 | 1856 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
1857 | align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN | |
f9ae6f95 | 1858 | ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)) |
581bf1c2 | 1859 | : BIGGEST_ALIGNMENT); |
153c3b50 | 1860 | val.lattice_val = CONSTANT; |
1861 | val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
796b6678 | 1862 | val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1); |
153c3b50 | 1863 | break; |
1864 | ||
939514e9 | 1865 | /* These builtins return their first argument, unmodified. */ |
1866 | case BUILT_IN_MEMCPY: | |
1867 | case BUILT_IN_MEMMOVE: | |
1868 | case BUILT_IN_MEMSET: | |
1869 | case BUILT_IN_STRCPY: | |
1870 | case BUILT_IN_STRNCPY: | |
1871 | case BUILT_IN_MEMCPY_CHK: | |
1872 | case BUILT_IN_MEMMOVE_CHK: | |
1873 | case BUILT_IN_MEMSET_CHK: | |
1874 | case BUILT_IN_STRCPY_CHK: | |
1875 | case BUILT_IN_STRNCPY_CHK: | |
1876 | val = get_value_for_expr (gimple_call_arg (stmt, 0), true); | |
1877 | break; | |
1878 | ||
fca0886c | 1879 | case BUILT_IN_ASSUME_ALIGNED: |
237e78b1 | 1880 | val = bit_value_assume_aligned (stmt, NULL_TREE, val, false); |
fca0886c | 1881 | break; |
1882 | ||
060fc206 | 1883 | case BUILT_IN_ALIGNED_ALLOC: |
1884 | { | |
1885 | tree align = get_constant_value (gimple_call_arg (stmt, 0)); | |
1886 | if (align | |
1887 | && tree_fits_uhwi_p (align)) | |
1888 | { | |
1889 | unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align); | |
1890 | if (aligni > 1 | |
1891 | /* align must be power-of-two */ | |
1892 | && (aligni & (aligni - 1)) == 0) | |
1893 | { | |
1894 | val.lattice_val = CONSTANT; | |
1895 | val.value = build_int_cst (ptr_type_node, 0); | |
cd89b631 | 1896 | val.mask = -aligni; |
060fc206 | 1897 | } |
1898 | } | |
1899 | break; | |
1900 | } | |
1901 | ||
153c3b50 | 1902 | default:; |
1903 | } | |
1904 | } | |
237e78b1 | 1905 | if (is_gimple_call (stmt) && gimple_call_lhs (stmt)) |
1906 | { | |
1907 | tree fntype = gimple_call_fntype (stmt); | |
1908 | if (fntype) | |
1909 | { | |
1910 | tree attrs = lookup_attribute ("assume_aligned", | |
1911 | TYPE_ATTRIBUTES (fntype)); | |
1912 | if (attrs) | |
1913 | val = bit_value_assume_aligned (stmt, attrs, val, false); | |
1914 | attrs = lookup_attribute ("alloc_align", | |
1915 | TYPE_ATTRIBUTES (fntype)); | |
1916 | if (attrs) | |
1917 | val = bit_value_assume_aligned (stmt, attrs, val, true); | |
1918 | } | |
1919 | } | |
b7e55469 | 1920 | is_constant = (val.lattice_val == CONSTANT); |
912f109f | 1921 | } |
1922 | ||
fc08b993 | 1923 | if (flag_tree_bit_ccp |
1924 | && ((is_constant && TREE_CODE (val.value) == INTEGER_CST) | |
4c9206f2 | 1925 | || !is_constant) |
fc08b993 | 1926 | && gimple_get_lhs (stmt) |
1927 | && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME) | |
1928 | { | |
1929 | tree lhs = gimple_get_lhs (stmt); | |
9c1be15e | 1930 | wide_int nonzero_bits = get_nonzero_bits (lhs); |
1931 | if (nonzero_bits != -1) | |
fc08b993 | 1932 | { |
1933 | if (!is_constant) | |
1934 | { | |
1935 | val.lattice_val = CONSTANT; | |
1936 | val.value = build_zero_cst (TREE_TYPE (lhs)); | |
9c1be15e | 1937 | val.mask = extend_mask (nonzero_bits); |
fc08b993 | 1938 | is_constant = true; |
1939 | } | |
1940 | else | |
1941 | { | |
9c1be15e | 1942 | if (wi::bit_and_not (val.value, nonzero_bits) != 0) |
aeb682a2 | 1943 | val.value = wide_int_to_tree (TREE_TYPE (lhs), |
9c1be15e | 1944 | nonzero_bits & val.value); |
aeb682a2 | 1945 | if (nonzero_bits == 0) |
1946 | val.mask = 0; | |
fc08b993 | 1947 | else |
09b2913a | 1948 | val.mask = val.mask & extend_mask (nonzero_bits); |
fc08b993 | 1949 | } |
1950 | } | |
1951 | } | |
1952 | ||
4c9206f2 | 1953 | /* The statement produced a nonconstant value. */ |
b7e55469 | 1954 | if (!is_constant) |
4ee9c684 | 1955 | { |
fc6cc27b | 1956 | /* The statement produced a copy. */ |
1957 | if (simplified && TREE_CODE (simplified) == SSA_NAME | |
1958 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified)) | |
1959 | { | |
1960 | val.lattice_val = CONSTANT; | |
1961 | val.value = simplified; | |
1962 | val.mask = -1; | |
1963 | } | |
1964 | /* The statement is VARYING. */ | |
1965 | else | |
1966 | { | |
1967 | val.lattice_val = VARYING; | |
1968 | val.value = NULL_TREE; | |
1969 | val.mask = -1; | |
1970 | } | |
4ee9c684 | 1971 | } |
41511585 | 1972 | |
1973 | return val; | |
4ee9c684 | 1974 | } |
1975 | ||
42acab1c | 1976 | typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab; |
2b15d2ba | 1977 | |
582a80ed | 1978 | /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before |
1979 | each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */ | |
1980 | ||
1981 | static void | |
2b15d2ba | 1982 | insert_clobber_before_stack_restore (tree saved_val, tree var, |
c1f445d2 | 1983 | gimple_htab **visited) |
582a80ed | 1984 | { |
42acab1c | 1985 | gimple *stmt; |
1a91d914 | 1986 | gassign *clobber_stmt; |
582a80ed | 1987 | tree clobber; |
1988 | imm_use_iterator iter; | |
1989 | gimple_stmt_iterator i; | |
42acab1c | 1990 | gimple **slot; |
582a80ed | 1991 | |
1992 | FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val) | |
1993 | if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
1994 | { | |
f1f41a6c | 1995 | clobber = build_constructor (TREE_TYPE (var), |
1996 | NULL); | |
582a80ed | 1997 | TREE_THIS_VOLATILE (clobber) = 1; |
1998 | clobber_stmt = gimple_build_assign (var, clobber); | |
1999 | ||
2000 | i = gsi_for_stmt (stmt); | |
2001 | gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT); | |
2002 | } | |
2003 | else if (gimple_code (stmt) == GIMPLE_PHI) | |
2004 | { | |
c1f445d2 | 2005 | if (!*visited) |
2006 | *visited = new gimple_htab (10); | |
582a80ed | 2007 | |
c1f445d2 | 2008 | slot = (*visited)->find_slot (stmt, INSERT); |
582a80ed | 2009 | if (*slot != NULL) |
2010 | continue; | |
2011 | ||
2012 | *slot = stmt; | |
2013 | insert_clobber_before_stack_restore (gimple_phi_result (stmt), var, | |
2014 | visited); | |
2015 | } | |
42eed683 | 2016 | else if (gimple_assign_ssa_name_copy_p (stmt)) |
2017 | insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var, | |
2018 | visited); | |
058a1b7a | 2019 | else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET)) |
2020 | continue; | |
582a80ed | 2021 | else |
2022 | gcc_assert (is_gimple_debug (stmt)); | |
2023 | } | |
2024 | ||
2025 | /* Advance the iterator to the previous non-debug gimple statement in the same | |
2026 | or dominating basic block. */ | |
2027 | ||
2028 | static inline void | |
2029 | gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i) | |
2030 | { | |
2031 | basic_block dom; | |
2032 | ||
2033 | gsi_prev_nondebug (i); | |
2034 | while (gsi_end_p (*i)) | |
2035 | { | |
2036 | dom = get_immediate_dominator (CDI_DOMINATORS, i->bb); | |
34154e27 | 2037 | if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
582a80ed | 2038 | return; |
2039 | ||
2040 | *i = gsi_last_bb (dom); | |
2041 | } | |
2042 | } | |
2043 | ||
2044 | /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert | |
1543f720 | 2045 | a clobber of VAR before each matching BUILT_IN_STACK_RESTORE. |
2046 | ||
2047 | It is possible that BUILT_IN_STACK_SAVE cannot be find in a dominator when a | |
2048 | previous pass (such as DOM) duplicated it along multiple paths to a BB. In | |
2049 | that case the function gives up without inserting the clobbers. */ | |
582a80ed | 2050 | |
2051 | static void | |
2052 | insert_clobbers_for_var (gimple_stmt_iterator i, tree var) | |
2053 | { | |
42acab1c | 2054 | gimple *stmt; |
582a80ed | 2055 | tree saved_val; |
c1f445d2 | 2056 | gimple_htab *visited = NULL; |
582a80ed | 2057 | |
1543f720 | 2058 | for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i)) |
582a80ed | 2059 | { |
2060 | stmt = gsi_stmt (i); | |
2061 | ||
2062 | if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE)) | |
2063 | continue; | |
582a80ed | 2064 | |
2065 | saved_val = gimple_call_lhs (stmt); | |
2066 | if (saved_val == NULL_TREE) | |
2067 | continue; | |
2068 | ||
2069 | insert_clobber_before_stack_restore (saved_val, var, &visited); | |
2070 | break; | |
2071 | } | |
2072 | ||
c1f445d2 | 2073 | delete visited; |
582a80ed | 2074 | } |
2075 | ||
581bf1c2 | 2076 | /* Detects a __builtin_alloca_with_align with constant size argument. Declares |
2077 | fixed-size array and returns the address, if found, otherwise returns | |
2078 | NULL_TREE. */ | |
9a65cc0a | 2079 | |
2080 | static tree | |
42acab1c | 2081 | fold_builtin_alloca_with_align (gimple *stmt) |
9a65cc0a | 2082 | { |
2083 | unsigned HOST_WIDE_INT size, threshold, n_elem; | |
2084 | tree lhs, arg, block, var, elem_type, array_type; | |
9a65cc0a | 2085 | |
2086 | /* Get lhs. */ | |
2087 | lhs = gimple_call_lhs (stmt); | |
2088 | if (lhs == NULL_TREE) | |
2089 | return NULL_TREE; | |
2090 | ||
2091 | /* Detect constant argument. */ | |
2092 | arg = get_constant_value (gimple_call_arg (stmt, 0)); | |
6e93d308 | 2093 | if (arg == NULL_TREE |
2094 | || TREE_CODE (arg) != INTEGER_CST | |
e913b5cd | 2095 | || !tree_fits_uhwi_p (arg)) |
9a65cc0a | 2096 | return NULL_TREE; |
6e93d308 | 2097 | |
8c53c46c | 2098 | size = tree_to_uhwi (arg); |
9a65cc0a | 2099 | |
581bf1c2 | 2100 | /* Heuristic: don't fold large allocas. */ |
9a65cc0a | 2101 | threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME); |
581bf1c2 | 2102 | /* In case the alloca is located at function entry, it has the same lifetime |
2103 | as a declared array, so we allow a larger size. */ | |
9a65cc0a | 2104 | block = gimple_block (stmt); |
2105 | if (!(cfun->after_inlining | |
14df71e4 | 2106 | && block |
9a65cc0a | 2107 | && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)) |
2108 | threshold /= 10; | |
2109 | if (size > threshold) | |
2110 | return NULL_TREE; | |
2111 | ||
2112 | /* Declare array. */ | |
2113 | elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1); | |
2114 | n_elem = size * 8 / BITS_PER_UNIT; | |
9a65cc0a | 2115 | array_type = build_array_type_nelts (elem_type, n_elem); |
f9e245b2 | 2116 | var = create_tmp_var (array_type); |
f9ae6f95 | 2117 | DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)); |
3d4a0a4b | 2118 | { |
2119 | struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs); | |
2120 | if (pi != NULL && !pi->pt.anything) | |
2121 | { | |
2122 | bool singleton_p; | |
2123 | unsigned uid; | |
2124 | singleton_p = pt_solution_singleton_p (&pi->pt, &uid); | |
2125 | gcc_assert (singleton_p); | |
2126 | SET_DECL_PT_UID (var, uid); | |
2127 | } | |
2128 | } | |
9a65cc0a | 2129 | |
2130 | /* Fold alloca to the address of the array. */ | |
2131 | return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var)); | |
2132 | } | |
2133 | ||
6688f8ec | 2134 | /* Fold the stmt at *GSI with CCP specific information that propagating |
2135 | and regular folding does not catch. */ | |
2136 | ||
2137 | static bool | |
2138 | ccp_fold_stmt (gimple_stmt_iterator *gsi) | |
2139 | { | |
42acab1c | 2140 | gimple *stmt = gsi_stmt (*gsi); |
6688f8ec | 2141 | |
94144e68 | 2142 | switch (gimple_code (stmt)) |
2143 | { | |
2144 | case GIMPLE_COND: | |
2145 | { | |
1a91d914 | 2146 | gcond *cond_stmt = as_a <gcond *> (stmt); |
9908fe4d | 2147 | ccp_prop_value_t val; |
94144e68 | 2148 | /* Statement evaluation will handle type mismatches in constants |
2149 | more gracefully than the final propagation. This allows us to | |
2150 | fold more conditionals here. */ | |
2151 | val = evaluate_stmt (stmt); | |
2152 | if (val.lattice_val != CONSTANT | |
796b6678 | 2153 | || val.mask != 0) |
94144e68 | 2154 | return false; |
2155 | ||
b7e55469 | 2156 | if (dump_file) |
2157 | { | |
2158 | fprintf (dump_file, "Folding predicate "); | |
2159 | print_gimple_expr (dump_file, stmt, 0, 0); | |
2160 | fprintf (dump_file, " to "); | |
2161 | print_generic_expr (dump_file, val.value, 0); | |
2162 | fprintf (dump_file, "\n"); | |
2163 | } | |
2164 | ||
94144e68 | 2165 | if (integer_zerop (val.value)) |
1a91d914 | 2166 | gimple_cond_make_false (cond_stmt); |
94144e68 | 2167 | else |
1a91d914 | 2168 | gimple_cond_make_true (cond_stmt); |
6688f8ec | 2169 | |
94144e68 | 2170 | return true; |
2171 | } | |
6688f8ec | 2172 | |
94144e68 | 2173 | case GIMPLE_CALL: |
2174 | { | |
2175 | tree lhs = gimple_call_lhs (stmt); | |
3064bb7b | 2176 | int flags = gimple_call_flags (stmt); |
15d138c9 | 2177 | tree val; |
94144e68 | 2178 | tree argt; |
2179 | bool changed = false; | |
2180 | unsigned i; | |
2181 | ||
2182 | /* If the call was folded into a constant make sure it goes | |
2183 | away even if we cannot propagate into all uses because of | |
2184 | type issues. */ | |
2185 | if (lhs | |
2186 | && TREE_CODE (lhs) == SSA_NAME | |
3064bb7b | 2187 | && (val = get_constant_value (lhs)) |
2188 | /* Don't optimize away calls that have side-effects. */ | |
2189 | && (flags & (ECF_CONST|ECF_PURE)) != 0 | |
2190 | && (flags & ECF_LOOPING_CONST_OR_PURE) == 0) | |
94144e68 | 2191 | { |
15d138c9 | 2192 | tree new_rhs = unshare_expr (val); |
338cce8f | 2193 | bool res; |
94144e68 | 2194 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
2195 | TREE_TYPE (new_rhs))) | |
2196 | new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); | |
338cce8f | 2197 | res = update_call_from_tree (gsi, new_rhs); |
2198 | gcc_assert (res); | |
94144e68 | 2199 | return true; |
2200 | } | |
2201 | ||
fb049fba | 2202 | /* Internal calls provide no argument types, so the extra laxity |
2203 | for normal calls does not apply. */ | |
2204 | if (gimple_call_internal_p (stmt)) | |
2205 | return false; | |
2206 | ||
581bf1c2 | 2207 | /* The heuristic of fold_builtin_alloca_with_align differs before and |
2208 | after inlining, so we don't require the arg to be changed into a | |
2209 | constant for folding, but just to be constant. */ | |
2210 | if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)) | |
9a65cc0a | 2211 | { |
581bf1c2 | 2212 | tree new_rhs = fold_builtin_alloca_with_align (stmt); |
6e93d308 | 2213 | if (new_rhs) |
2214 | { | |
2215 | bool res = update_call_from_tree (gsi, new_rhs); | |
582a80ed | 2216 | tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0); |
6e93d308 | 2217 | gcc_assert (res); |
582a80ed | 2218 | insert_clobbers_for_var (*gsi, var); |
6e93d308 | 2219 | return true; |
2220 | } | |
9a65cc0a | 2221 | } |
2222 | ||
94144e68 | 2223 | /* Propagate into the call arguments. Compared to replace_uses_in |
2224 | this can use the argument slot types for type verification | |
2225 | instead of the current argument type. We also can safely | |
2226 | drop qualifiers here as we are dealing with constants anyway. */ | |
2de00a2d | 2227 | argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt)); |
94144e68 | 2228 | for (i = 0; i < gimple_call_num_args (stmt) && argt; |
2229 | ++i, argt = TREE_CHAIN (argt)) | |
2230 | { | |
2231 | tree arg = gimple_call_arg (stmt, i); | |
2232 | if (TREE_CODE (arg) == SSA_NAME | |
15d138c9 | 2233 | && (val = get_constant_value (arg)) |
94144e68 | 2234 | && useless_type_conversion_p |
2235 | (TYPE_MAIN_VARIANT (TREE_VALUE (argt)), | |
15d138c9 | 2236 | TYPE_MAIN_VARIANT (TREE_TYPE (val)))) |
94144e68 | 2237 | { |
15d138c9 | 2238 | gimple_call_set_arg (stmt, i, unshare_expr (val)); |
94144e68 | 2239 | changed = true; |
2240 | } | |
2241 | } | |
e16f4c39 | 2242 | |
94144e68 | 2243 | return changed; |
2244 | } | |
6688f8ec | 2245 | |
6872bf3c | 2246 | case GIMPLE_ASSIGN: |
2247 | { | |
2248 | tree lhs = gimple_assign_lhs (stmt); | |
15d138c9 | 2249 | tree val; |
6872bf3c | 2250 | |
2251 | /* If we have a load that turned out to be constant replace it | |
2252 | as we cannot propagate into all uses in all cases. */ | |
2253 | if (gimple_assign_single_p (stmt) | |
2254 | && TREE_CODE (lhs) == SSA_NAME | |
15d138c9 | 2255 | && (val = get_constant_value (lhs))) |
6872bf3c | 2256 | { |
15d138c9 | 2257 | tree rhs = unshare_expr (val); |
6872bf3c | 2258 | if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) |
182cf5a9 | 2259 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs); |
6872bf3c | 2260 | gimple_assign_set_rhs_from_tree (gsi, rhs); |
2261 | return true; | |
2262 | } | |
2263 | ||
2264 | return false; | |
2265 | } | |
2266 | ||
94144e68 | 2267 | default: |
2268 | return false; | |
2269 | } | |
6688f8ec | 2270 | } |
2271 | ||
41511585 | 2272 | /* Visit the assignment statement STMT. Set the value of its LHS to the |
88dbf20f | 2273 | value computed by the RHS and store LHS in *OUTPUT_P. If STMT |
2274 | creates virtual definitions, set the value of each new name to that | |
75a70cf9 | 2275 | of the RHS (if we can derive a constant out of the RHS). |
2276 | Value-returning call statements also perform an assignment, and | |
2277 | are handled here. */ | |
4ee9c684 | 2278 | |
41511585 | 2279 | static enum ssa_prop_result |
42acab1c | 2280 | visit_assignment (gimple *stmt, tree *output_p) |
4ee9c684 | 2281 | { |
9908fe4d | 2282 | ccp_prop_value_t val; |
fc6cc27b | 2283 | enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING; |
4ee9c684 | 2284 | |
75a70cf9 | 2285 | tree lhs = gimple_get_lhs (stmt); |
88dbf20f | 2286 | if (TREE_CODE (lhs) == SSA_NAME) |
4ee9c684 | 2287 | { |
fc6cc27b | 2288 | /* Evaluate the statement, which could be |
2289 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |
2290 | val = evaluate_stmt (stmt); | |
2291 | ||
88dbf20f | 2292 | /* If STMT is an assignment to an SSA_NAME, we only have one |
2293 | value to set. */ | |
27de3d43 | 2294 | if (set_lattice_value (lhs, &val)) |
88dbf20f | 2295 | { |
2296 | *output_p = lhs; | |
2297 | if (val.lattice_val == VARYING) | |
2298 | retval = SSA_PROP_VARYING; | |
2299 | else | |
2300 | retval = SSA_PROP_INTERESTING; | |
2301 | } | |
4ee9c684 | 2302 | } |
88dbf20f | 2303 | |
2304 | return retval; | |
4ee9c684 | 2305 | } |
2306 | ||
4ee9c684 | 2307 | |
41511585 | 2308 | /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING |
2309 | if it can determine which edge will be taken. Otherwise, return | |
2310 | SSA_PROP_VARYING. */ | |
2311 | ||
2312 | static enum ssa_prop_result | |
42acab1c | 2313 | visit_cond_stmt (gimple *stmt, edge *taken_edge_p) |
4ee9c684 | 2314 | { |
9908fe4d | 2315 | ccp_prop_value_t val; |
41511585 | 2316 | basic_block block; |
2317 | ||
75a70cf9 | 2318 | block = gimple_bb (stmt); |
41511585 | 2319 | val = evaluate_stmt (stmt); |
b7e55469 | 2320 | if (val.lattice_val != CONSTANT |
796b6678 | 2321 | || val.mask != 0) |
b7e55469 | 2322 | return SSA_PROP_VARYING; |
41511585 | 2323 | |
2324 | /* Find which edge out of the conditional block will be taken and add it | |
2325 | to the worklist. If no single edge can be determined statically, | |
2326 | return SSA_PROP_VARYING to feed all the outgoing edges to the | |
2327 | propagation engine. */ | |
b7e55469 | 2328 | *taken_edge_p = find_taken_edge (block, val.value); |
41511585 | 2329 | if (*taken_edge_p) |
2330 | return SSA_PROP_INTERESTING; | |
2331 | else | |
2332 | return SSA_PROP_VARYING; | |
4ee9c684 | 2333 | } |
2334 | ||
4ee9c684 | 2335 | |
41511585 | 2336 | /* Evaluate statement STMT. If the statement produces an output value and |
2337 | its evaluation changes the lattice value of its output, return | |
2338 | SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the | |
2339 | output value. | |
48e1416a | 2340 | |
41511585 | 2341 | If STMT is a conditional branch and we can determine its truth |
2342 | value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying | |
2343 | value, return SSA_PROP_VARYING. */ | |
4ee9c684 | 2344 | |
41511585 | 2345 | static enum ssa_prop_result |
42acab1c | 2346 | ccp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p) |
41511585 | 2347 | { |
41511585 | 2348 | tree def; |
2349 | ssa_op_iter iter; | |
4ee9c684 | 2350 | |
41511585 | 2351 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4ee9c684 | 2352 | { |
88dbf20f | 2353 | fprintf (dump_file, "\nVisiting statement:\n"); |
75a70cf9 | 2354 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2355 | } |
4ee9c684 | 2356 | |
75a70cf9 | 2357 | switch (gimple_code (stmt)) |
4ee9c684 | 2358 | { |
75a70cf9 | 2359 | case GIMPLE_ASSIGN: |
2360 | /* If the statement is an assignment that produces a single | |
2361 | output value, evaluate its RHS to see if the lattice value of | |
2362 | its output has changed. */ | |
2363 | return visit_assignment (stmt, output_p); | |
2364 | ||
2365 | case GIMPLE_CALL: | |
2366 | /* A value-returning call also performs an assignment. */ | |
2367 | if (gimple_call_lhs (stmt) != NULL_TREE) | |
2368 | return visit_assignment (stmt, output_p); | |
2369 | break; | |
2370 | ||
2371 | case GIMPLE_COND: | |
2372 | case GIMPLE_SWITCH: | |
2373 | /* If STMT is a conditional branch, see if we can determine | |
2374 | which branch will be taken. */ | |
2375 | /* FIXME. It appears that we should be able to optimize | |
2376 | computed GOTOs here as well. */ | |
2377 | return visit_cond_stmt (stmt, taken_edge_p); | |
2378 | ||
2379 | default: | |
2380 | break; | |
4ee9c684 | 2381 | } |
4ee9c684 | 2382 | |
41511585 | 2383 | /* Any other kind of statement is not interesting for constant |
2384 | propagation and, therefore, not worth simulating. */ | |
41511585 | 2385 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2386 | fprintf (dump_file, "No interesting values produced. Marked VARYING.\n"); | |
4ee9c684 | 2387 | |
41511585 | 2388 | /* Definitions made by statements other than assignments to |
2389 | SSA_NAMEs represent unknown modifications to their outputs. | |
2390 | Mark them VARYING. */ | |
88dbf20f | 2391 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
27de3d43 | 2392 | set_value_varying (def); |
4ee9c684 | 2393 | |
41511585 | 2394 | return SSA_PROP_VARYING; |
2395 | } | |
4ee9c684 | 2396 | |
4ee9c684 | 2397 | |
d0322b7d | 2398 | /* Main entry point for SSA Conditional Constant Propagation. If NONZERO_P, |
2399 | record nonzero bits. */ | |
41511585 | 2400 | |
33a34f1e | 2401 | static unsigned int |
d0322b7d | 2402 | do_ssa_ccp (bool nonzero_p) |
41511585 | 2403 | { |
582a80ed | 2404 | unsigned int todo = 0; |
2405 | calculate_dominance_info (CDI_DOMINATORS); | |
e2588447 | 2406 | |
41511585 | 2407 | ccp_initialize (); |
2408 | ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node); | |
d0322b7d | 2409 | if (ccp_finalize (nonzero_p)) |
e2588447 | 2410 | { |
2411 | todo = (TODO_cleanup_cfg | TODO_update_ssa); | |
2412 | ||
2413 | /* ccp_finalize does not preserve loop-closed ssa. */ | |
2414 | loops_state_clear (LOOP_CLOSED_SSA); | |
2415 | } | |
2416 | ||
582a80ed | 2417 | free_dominance_info (CDI_DOMINATORS); |
2418 | return todo; | |
4ee9c684 | 2419 | } |
2420 | ||
5664499b | 2421 | |
cbe8bda8 | 2422 | namespace { |
2423 | ||
2424 | const pass_data pass_data_ccp = | |
41511585 | 2425 | { |
cbe8bda8 | 2426 | GIMPLE_PASS, /* type */ |
2427 | "ccp", /* name */ | |
2428 | OPTGROUP_NONE, /* optinfo_flags */ | |
cbe8bda8 | 2429 | TV_TREE_CCP, /* tv_id */ |
2430 | ( PROP_cfg | PROP_ssa ), /* properties_required */ | |
2431 | 0, /* properties_provided */ | |
2432 | 0, /* properties_destroyed */ | |
2433 | 0, /* todo_flags_start */ | |
8b88439e | 2434 | TODO_update_address_taken, /* todo_flags_finish */ |
41511585 | 2435 | }; |
4ee9c684 | 2436 | |
cbe8bda8 | 2437 | class pass_ccp : public gimple_opt_pass |
2438 | { | |
2439 | public: | |
9af5ce0c | 2440 | pass_ccp (gcc::context *ctxt) |
d0322b7d | 2441 | : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false) |
cbe8bda8 | 2442 | {} |
2443 | ||
2444 | /* opt_pass methods: */ | |
ae84f584 | 2445 | opt_pass * clone () { return new pass_ccp (m_ctxt); } |
d0322b7d | 2446 | void set_pass_param (unsigned int n, bool param) |
2447 | { | |
2448 | gcc_assert (n == 0); | |
2449 | nonzero_p = param; | |
2450 | } | |
31315c24 | 2451 | virtual bool gate (function *) { return flag_tree_ccp != 0; } |
d0322b7d | 2452 | virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); } |
cbe8bda8 | 2453 | |
d0322b7d | 2454 | private: |
2455 | /* Determines whether the pass instance records nonzero bits. */ | |
2456 | bool nonzero_p; | |
cbe8bda8 | 2457 | }; // class pass_ccp |
2458 | ||
2459 | } // anon namespace | |
2460 | ||
2461 | gimple_opt_pass * | |
2462 | make_pass_ccp (gcc::context *ctxt) | |
2463 | { | |
2464 | return new pass_ccp (ctxt); | |
2465 | } | |
2466 | ||
4ee9c684 | 2467 | |
75a70cf9 | 2468 | |
bdd0e199 | 2469 | /* Try to optimize out __builtin_stack_restore. Optimize it out |
2470 | if there is another __builtin_stack_restore in the same basic | |
2471 | block and no calls or ASM_EXPRs are in between, or if this block's | |
2472 | only outgoing edge is to EXIT_BLOCK and there are no calls or | |
2473 | ASM_EXPRs after this __builtin_stack_restore. */ | |
2474 | ||
2475 | static tree | |
75a70cf9 | 2476 | optimize_stack_restore (gimple_stmt_iterator i) |
bdd0e199 | 2477 | { |
6ea999da | 2478 | tree callee; |
42acab1c | 2479 | gimple *stmt; |
75a70cf9 | 2480 | |
2481 | basic_block bb = gsi_bb (i); | |
42acab1c | 2482 | gimple *call = gsi_stmt (i); |
bdd0e199 | 2483 | |
75a70cf9 | 2484 | if (gimple_code (call) != GIMPLE_CALL |
2485 | || gimple_call_num_args (call) != 1 | |
2486 | || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME | |
2487 | || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0)))) | |
bdd0e199 | 2488 | return NULL_TREE; |
2489 | ||
75a70cf9 | 2490 | for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i)) |
bdd0e199 | 2491 | { |
75a70cf9 | 2492 | stmt = gsi_stmt (i); |
2493 | if (gimple_code (stmt) == GIMPLE_ASM) | |
bdd0e199 | 2494 | return NULL_TREE; |
75a70cf9 | 2495 | if (gimple_code (stmt) != GIMPLE_CALL) |
bdd0e199 | 2496 | continue; |
2497 | ||
75a70cf9 | 2498 | callee = gimple_call_fndecl (stmt); |
c40a6f90 | 2499 | if (!callee |
2500 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL | |
2501 | /* All regular builtins are ok, just obviously not alloca. */ | |
581bf1c2 | 2502 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA |
2503 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN) | |
bdd0e199 | 2504 | return NULL_TREE; |
2505 | ||
2506 | if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE) | |
6ea999da | 2507 | goto second_stack_restore; |
bdd0e199 | 2508 | } |
2509 | ||
6ea999da | 2510 | if (!gsi_end_p (i)) |
bdd0e199 | 2511 | return NULL_TREE; |
2512 | ||
6ea999da | 2513 | /* Allow one successor of the exit block, or zero successors. */ |
2514 | switch (EDGE_COUNT (bb->succs)) | |
2515 | { | |
2516 | case 0: | |
2517 | break; | |
2518 | case 1: | |
34154e27 | 2519 | if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) |
6ea999da | 2520 | return NULL_TREE; |
2521 | break; | |
2522 | default: | |
2523 | return NULL_TREE; | |
2524 | } | |
2525 | second_stack_restore: | |
bdd0e199 | 2526 | |
6ea999da | 2527 | /* If there's exactly one use, then zap the call to __builtin_stack_save. |
2528 | If there are multiple uses, then the last one should remove the call. | |
2529 | In any case, whether the call to __builtin_stack_save can be removed | |
2530 | or not is irrelevant to removing the call to __builtin_stack_restore. */ | |
2531 | if (has_single_use (gimple_call_arg (call, 0))) | |
2532 | { | |
42acab1c | 2533 | gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0)); |
6ea999da | 2534 | if (is_gimple_call (stack_save)) |
2535 | { | |
2536 | callee = gimple_call_fndecl (stack_save); | |
2537 | if (callee | |
2538 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
2539 | && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE) | |
2540 | { | |
2541 | gimple_stmt_iterator stack_save_gsi; | |
2542 | tree rhs; | |
bdd0e199 | 2543 | |
6ea999da | 2544 | stack_save_gsi = gsi_for_stmt (stack_save); |
2545 | rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0); | |
2546 | update_call_from_tree (&stack_save_gsi, rhs); | |
2547 | } | |
2548 | } | |
2549 | } | |
bdd0e199 | 2550 | |
75a70cf9 | 2551 | /* No effect, so the statement will be deleted. */ |
bdd0e199 | 2552 | return integer_zero_node; |
2553 | } | |
75a70cf9 | 2554 | |
8a58ed0a | 2555 | /* If va_list type is a simple pointer and nothing special is needed, |
2556 | optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0), | |
2557 | __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple | |
2558 | pointer assignment. */ | |
2559 | ||
2560 | static tree | |
42acab1c | 2561 | optimize_stdarg_builtin (gimple *call) |
8a58ed0a | 2562 | { |
5f57a8b1 | 2563 | tree callee, lhs, rhs, cfun_va_list; |
8a58ed0a | 2564 | bool va_list_simple_ptr; |
389dd41b | 2565 | location_t loc = gimple_location (call); |
8a58ed0a | 2566 | |
75a70cf9 | 2567 | if (gimple_code (call) != GIMPLE_CALL) |
8a58ed0a | 2568 | return NULL_TREE; |
2569 | ||
75a70cf9 | 2570 | callee = gimple_call_fndecl (call); |
5f57a8b1 | 2571 | |
2572 | cfun_va_list = targetm.fn_abi_va_list (callee); | |
2573 | va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) | |
2574 | && (TREE_TYPE (cfun_va_list) == void_type_node | |
2575 | || TREE_TYPE (cfun_va_list) == char_type_node); | |
2576 | ||
8a58ed0a | 2577 | switch (DECL_FUNCTION_CODE (callee)) |
2578 | { | |
2579 | case BUILT_IN_VA_START: | |
2580 | if (!va_list_simple_ptr | |
2581 | || targetm.expand_builtin_va_start != NULL | |
e7ed5dd7 | 2582 | || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG)) |
8a58ed0a | 2583 | return NULL_TREE; |
2584 | ||
75a70cf9 | 2585 | if (gimple_call_num_args (call) != 2) |
8a58ed0a | 2586 | return NULL_TREE; |
2587 | ||
75a70cf9 | 2588 | lhs = gimple_call_arg (call, 0); |
8a58ed0a | 2589 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2590 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
5f57a8b1 | 2591 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2592 | return NULL_TREE; |
48e1416a | 2593 | |
389dd41b | 2594 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
b9a16870 | 2595 | rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG), |
75a70cf9 | 2596 | 1, integer_zero_node); |
389dd41b | 2597 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
8a58ed0a | 2598 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2599 | ||
2600 | case BUILT_IN_VA_COPY: | |
2601 | if (!va_list_simple_ptr) | |
2602 | return NULL_TREE; | |
2603 | ||
75a70cf9 | 2604 | if (gimple_call_num_args (call) != 2) |
8a58ed0a | 2605 | return NULL_TREE; |
2606 | ||
75a70cf9 | 2607 | lhs = gimple_call_arg (call, 0); |
8a58ed0a | 2608 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2609 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
5f57a8b1 | 2610 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2611 | return NULL_TREE; |
2612 | ||
389dd41b | 2613 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
75a70cf9 | 2614 | rhs = gimple_call_arg (call, 1); |
8a58ed0a | 2615 | if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs)) |
5f57a8b1 | 2616 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2617 | return NULL_TREE; |
2618 | ||
389dd41b | 2619 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
8a58ed0a | 2620 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2621 | ||
2622 | case BUILT_IN_VA_END: | |
75a70cf9 | 2623 | /* No effect, so the statement will be deleted. */ |
8a58ed0a | 2624 | return integer_zero_node; |
2625 | ||
2626 | default: | |
2627 | gcc_unreachable (); | |
2628 | } | |
2629 | } | |
75a70cf9 | 2630 | |
f87df69a | 2631 | /* Attemp to make the block of __builtin_unreachable I unreachable by changing |
2632 | the incoming jumps. Return true if at least one jump was changed. */ | |
2633 | ||
2634 | static bool | |
2635 | optimize_unreachable (gimple_stmt_iterator i) | |
2636 | { | |
2637 | basic_block bb = gsi_bb (i); | |
2638 | gimple_stmt_iterator gsi; | |
42acab1c | 2639 | gimple *stmt; |
f87df69a | 2640 | edge_iterator ei; |
2641 | edge e; | |
2642 | bool ret; | |
2643 | ||
f6b540af | 2644 | if (flag_sanitize & SANITIZE_UNREACHABLE) |
2645 | return false; | |
2646 | ||
f87df69a | 2647 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2648 | { | |
2649 | stmt = gsi_stmt (gsi); | |
2650 | ||
2651 | if (is_gimple_debug (stmt)) | |
2652 | continue; | |
2653 | ||
1a91d914 | 2654 | if (glabel *label_stmt = dyn_cast <glabel *> (stmt)) |
f87df69a | 2655 | { |
2656 | /* Verify we do not need to preserve the label. */ | |
1a91d914 | 2657 | if (FORCED_LABEL (gimple_label_label (label_stmt))) |
f87df69a | 2658 | return false; |
2659 | ||
2660 | continue; | |
2661 | } | |
2662 | ||
2663 | /* Only handle the case that __builtin_unreachable is the first statement | |
2664 | in the block. We rely on DCE to remove stmts without side-effects | |
2665 | before __builtin_unreachable. */ | |
2666 | if (gsi_stmt (gsi) != gsi_stmt (i)) | |
2667 | return false; | |
2668 | } | |
2669 | ||
2670 | ret = false; | |
2671 | FOR_EACH_EDGE (e, ei, bb->preds) | |
2672 | { | |
2673 | gsi = gsi_last_bb (e->src); | |
522f73a1 | 2674 | if (gsi_end_p (gsi)) |
2675 | continue; | |
f87df69a | 2676 | |
522f73a1 | 2677 | stmt = gsi_stmt (gsi); |
1a91d914 | 2678 | if (gcond *cond_stmt = dyn_cast <gcond *> (stmt)) |
f87df69a | 2679 | { |
2680 | if (e->flags & EDGE_TRUE_VALUE) | |
1a91d914 | 2681 | gimple_cond_make_false (cond_stmt); |
f87df69a | 2682 | else if (e->flags & EDGE_FALSE_VALUE) |
1a91d914 | 2683 | gimple_cond_make_true (cond_stmt); |
f87df69a | 2684 | else |
2685 | gcc_unreachable (); | |
1a91d914 | 2686 | update_stmt (cond_stmt); |
f87df69a | 2687 | } |
2688 | else | |
2689 | { | |
2690 | /* Todo: handle other cases, f.i. switch statement. */ | |
2691 | continue; | |
2692 | } | |
2693 | ||
2694 | ret = true; | |
2695 | } | |
2696 | ||
2697 | return ret; | |
2698 | } | |
2699 | ||
4ee9c684 | 2700 | /* A simple pass that attempts to fold all builtin functions. This pass |
2701 | is run after we've propagated as many constants as we can. */ | |
2702 | ||
65b0537f | 2703 | namespace { |
2704 | ||
2705 | const pass_data pass_data_fold_builtins = | |
2706 | { | |
2707 | GIMPLE_PASS, /* type */ | |
2708 | "fab", /* name */ | |
2709 | OPTGROUP_NONE, /* optinfo_flags */ | |
65b0537f | 2710 | TV_NONE, /* tv_id */ |
2711 | ( PROP_cfg | PROP_ssa ), /* properties_required */ | |
2712 | 0, /* properties_provided */ | |
2713 | 0, /* properties_destroyed */ | |
2714 | 0, /* todo_flags_start */ | |
8b88439e | 2715 | TODO_update_ssa, /* todo_flags_finish */ |
65b0537f | 2716 | }; |
2717 | ||
2718 | class pass_fold_builtins : public gimple_opt_pass | |
2719 | { | |
2720 | public: | |
2721 | pass_fold_builtins (gcc::context *ctxt) | |
2722 | : gimple_opt_pass (pass_data_fold_builtins, ctxt) | |
2723 | {} | |
2724 | ||
2725 | /* opt_pass methods: */ | |
2726 | opt_pass * clone () { return new pass_fold_builtins (m_ctxt); } | |
2727 | virtual unsigned int execute (function *); | |
2728 | ||
2729 | }; // class pass_fold_builtins | |
2730 | ||
2731 | unsigned int | |
2732 | pass_fold_builtins::execute (function *fun) | |
4ee9c684 | 2733 | { |
b36237eb | 2734 | bool cfg_changed = false; |
4ee9c684 | 2735 | basic_block bb; |
b1b7c0c4 | 2736 | unsigned int todoflags = 0; |
48e1416a | 2737 | |
65b0537f | 2738 | FOR_EACH_BB_FN (bb, fun) |
4ee9c684 | 2739 | { |
75a70cf9 | 2740 | gimple_stmt_iterator i; |
2741 | for (i = gsi_start_bb (bb); !gsi_end_p (i); ) | |
4ee9c684 | 2742 | { |
42acab1c | 2743 | gimple *stmt, *old_stmt; |
40a3eb84 | 2744 | tree callee; |
0a39fd54 | 2745 | enum built_in_function fcode; |
4ee9c684 | 2746 | |
75a70cf9 | 2747 | stmt = gsi_stmt (i); |
2748 | ||
2749 | if (gimple_code (stmt) != GIMPLE_CALL) | |
0a39fd54 | 2750 | { |
896a0c42 | 2751 | /* Remove all *ssaname_N ={v} {CLOBBER}; stmts, |
2752 | after the last GIMPLE DSE they aren't needed and might | |
2753 | unnecessarily keep the SSA_NAMEs live. */ | |
2754 | if (gimple_clobber_p (stmt)) | |
2755 | { | |
2756 | tree lhs = gimple_assign_lhs (stmt); | |
2757 | if (TREE_CODE (lhs) == MEM_REF | |
2758 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME) | |
2759 | { | |
2760 | unlink_stmt_vdef (stmt); | |
2761 | gsi_remove (&i, true); | |
2762 | release_defs (stmt); | |
2763 | continue; | |
2764 | } | |
2765 | } | |
75a70cf9 | 2766 | gsi_next (&i); |
0a39fd54 | 2767 | continue; |
2768 | } | |
40a3eb84 | 2769 | |
75a70cf9 | 2770 | callee = gimple_call_fndecl (stmt); |
4ee9c684 | 2771 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) |
0a39fd54 | 2772 | { |
75a70cf9 | 2773 | gsi_next (&i); |
0a39fd54 | 2774 | continue; |
2775 | } | |
5a4b7e1e | 2776 | |
40a3eb84 | 2777 | fcode = DECL_FUNCTION_CODE (callee); |
2778 | if (fold_stmt (&i)) | |
2779 | ; | |
2780 | else | |
2781 | { | |
2782 | tree result = NULL_TREE; | |
2783 | switch (DECL_FUNCTION_CODE (callee)) | |
2784 | { | |
2785 | case BUILT_IN_CONSTANT_P: | |
2786 | /* Resolve __builtin_constant_p. If it hasn't been | |
2787 | folded to integer_one_node by now, it's fairly | |
2788 | certain that the value simply isn't constant. */ | |
2789 | result = integer_zero_node; | |
2790 | break; | |
5a4b7e1e | 2791 | |
40a3eb84 | 2792 | case BUILT_IN_ASSUME_ALIGNED: |
2793 | /* Remove __builtin_assume_aligned. */ | |
2794 | result = gimple_call_arg (stmt, 0); | |
2795 | break; | |
4ee9c684 | 2796 | |
40a3eb84 | 2797 | case BUILT_IN_STACK_RESTORE: |
2798 | result = optimize_stack_restore (i); | |
2799 | if (result) | |
2800 | break; | |
2801 | gsi_next (&i); | |
2802 | continue; | |
fca0886c | 2803 | |
40a3eb84 | 2804 | case BUILT_IN_UNREACHABLE: |
2805 | if (optimize_unreachable (i)) | |
2806 | cfg_changed = true; | |
8a58ed0a | 2807 | break; |
8a58ed0a | 2808 | |
40a3eb84 | 2809 | case BUILT_IN_VA_START: |
2810 | case BUILT_IN_VA_END: | |
2811 | case BUILT_IN_VA_COPY: | |
2812 | /* These shouldn't be folded before pass_stdarg. */ | |
2813 | result = optimize_stdarg_builtin (stmt); | |
2814 | if (result) | |
2815 | break; | |
2816 | /* FALLTHRU */ | |
f87df69a | 2817 | |
40a3eb84 | 2818 | default:; |
2819 | } | |
bdd0e199 | 2820 | |
40a3eb84 | 2821 | if (!result) |
2822 | { | |
2823 | gsi_next (&i); | |
2824 | continue; | |
2825 | } | |
4ee9c684 | 2826 | |
40a3eb84 | 2827 | if (!update_call_from_tree (&i, result)) |
2828 | gimplify_and_update_call_from_tree (&i, result); | |
2829 | } | |
2830 | ||
2831 | todoflags |= TODO_update_address_taken; | |
f87df69a | 2832 | |
4ee9c684 | 2833 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2834 | { | |
2835 | fprintf (dump_file, "Simplified\n "); | |
75a70cf9 | 2836 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2837 | } |
2838 | ||
75a70cf9 | 2839 | old_stmt = stmt; |
75a70cf9 | 2840 | stmt = gsi_stmt (i); |
4c5fd53c | 2841 | update_stmt (stmt); |
de6ed584 | 2842 | |
75a70cf9 | 2843 | if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt) |
2844 | && gimple_purge_dead_eh_edges (bb)) | |
b36237eb | 2845 | cfg_changed = true; |
4ee9c684 | 2846 | |
2847 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2848 | { | |
2849 | fprintf (dump_file, "to\n "); | |
75a70cf9 | 2850 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2851 | fprintf (dump_file, "\n"); |
2852 | } | |
0a39fd54 | 2853 | |
2854 | /* Retry the same statement if it changed into another | |
2855 | builtin, there might be new opportunities now. */ | |
75a70cf9 | 2856 | if (gimple_code (stmt) != GIMPLE_CALL) |
0a39fd54 | 2857 | { |
75a70cf9 | 2858 | gsi_next (&i); |
0a39fd54 | 2859 | continue; |
2860 | } | |
75a70cf9 | 2861 | callee = gimple_call_fndecl (stmt); |
0a39fd54 | 2862 | if (!callee |
75a70cf9 | 2863 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL |
0a39fd54 | 2864 | || DECL_FUNCTION_CODE (callee) == fcode) |
75a70cf9 | 2865 | gsi_next (&i); |
4ee9c684 | 2866 | } |
2867 | } | |
48e1416a | 2868 | |
b36237eb | 2869 | /* Delete unreachable blocks. */ |
b1b7c0c4 | 2870 | if (cfg_changed) |
2871 | todoflags |= TODO_cleanup_cfg; | |
48e1416a | 2872 | |
b1b7c0c4 | 2873 | return todoflags; |
4ee9c684 | 2874 | } |
2875 | ||
cbe8bda8 | 2876 | } // anon namespace |
2877 | ||
2878 | gimple_opt_pass * | |
2879 | make_pass_fold_builtins (gcc::context *ctxt) | |
2880 | { | |
2881 | return new pass_fold_builtins (ctxt); | |
2882 | } |