]>
Commit | Line | Data |
---|---|---|
291d763b | 1 | /* Forward propagation of expressions for single use variables. |
628ce22b | 2 | Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011 |
ce084dfc | 3 | Free Software Foundation, Inc. |
4ee9c684 | 4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 9 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
4ee9c684 | 25 | #include "tree.h" |
4ee9c684 | 26 | #include "tm_p.h" |
27 | #include "basic-block.h" | |
28 | #include "timevar.h" | |
ce084dfc | 29 | #include "tree-pretty-print.h" |
4ee9c684 | 30 | #include "tree-flow.h" |
31 | #include "tree-pass.h" | |
32 | #include "tree-dump.h" | |
291d763b | 33 | #include "langhooks.h" |
5adc1066 | 34 | #include "flags.h" |
75a70cf9 | 35 | #include "gimple.h" |
27f931ff | 36 | #include "expr.h" |
4ee9c684 | 37 | |
291d763b | 38 | /* This pass propagates the RHS of assignment statements into use |
39 | sites of the LHS of the assignment. It's basically a specialized | |
8f628ee8 | 40 | form of tree combination. It is hoped all of this can disappear |
41 | when we have a generalized tree combiner. | |
4ee9c684 | 42 | |
291d763b | 43 | One class of common cases we handle is forward propagating a single use |
48e1416a | 44 | variable into a COND_EXPR. |
4ee9c684 | 45 | |
46 | bb0: | |
47 | x = a COND b; | |
48 | if (x) goto ... else goto ... | |
49 | ||
50 | Will be transformed into: | |
51 | ||
52 | bb0: | |
53 | if (a COND b) goto ... else goto ... | |
48e1416a | 54 | |
4ee9c684 | 55 | Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). |
56 | ||
57 | Or (assuming c1 and c2 are constants): | |
58 | ||
59 | bb0: | |
48e1416a | 60 | x = a + c1; |
4ee9c684 | 61 | if (x EQ/NEQ c2) goto ... else goto ... |
62 | ||
63 | Will be transformed into: | |
64 | ||
65 | bb0: | |
66 | if (a EQ/NEQ (c2 - c1)) goto ... else goto ... | |
67 | ||
68 | Similarly for x = a - c1. | |
48e1416a | 69 | |
4ee9c684 | 70 | Or |
71 | ||
72 | bb0: | |
73 | x = !a | |
74 | if (x) goto ... else goto ... | |
75 | ||
76 | Will be transformed into: | |
77 | ||
78 | bb0: | |
79 | if (a == 0) goto ... else goto ... | |
80 | ||
81 | Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
82 | For these cases, we propagate A into all, possibly more than one, | |
83 | COND_EXPRs that use X. | |
84 | ||
f5c8cff5 | 85 | Or |
86 | ||
87 | bb0: | |
88 | x = (typecast) a | |
89 | if (x) goto ... else goto ... | |
90 | ||
91 | Will be transformed into: | |
92 | ||
93 | bb0: | |
94 | if (a != 0) goto ... else goto ... | |
95 | ||
96 | (Assuming a is an integral type and x is a boolean or x is an | |
97 | integral and a is a boolean.) | |
98 | ||
99 | Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
100 | For these cases, we propagate A into all, possibly more than one, | |
101 | COND_EXPRs that use X. | |
102 | ||
4ee9c684 | 103 | In addition to eliminating the variable and the statement which assigns |
104 | a value to the variable, we may be able to later thread the jump without | |
e6dfde59 | 105 | adding insane complexity in the dominator optimizer. |
4ee9c684 | 106 | |
f5c8cff5 | 107 | Also note these transformations can cascade. We handle this by having |
108 | a worklist of COND_EXPR statements to examine. As we make a change to | |
109 | a statement, we put it back on the worklist to examine on the next | |
110 | iteration of the main loop. | |
111 | ||
291d763b | 112 | A second class of propagation opportunities arises for ADDR_EXPR |
113 | nodes. | |
114 | ||
115 | ptr = &x->y->z; | |
116 | res = *ptr; | |
117 | ||
118 | Will get turned into | |
119 | ||
120 | res = x->y->z; | |
121 | ||
50f39ec6 | 122 | Or |
123 | ptr = (type1*)&type2var; | |
124 | res = *ptr | |
125 | ||
126 | Will get turned into (if type1 and type2 are the same size | |
127 | and neither have volatile on them): | |
128 | res = VIEW_CONVERT_EXPR<type1>(type2var) | |
129 | ||
291d763b | 130 | Or |
131 | ||
132 | ptr = &x[0]; | |
133 | ptr2 = ptr + <constant>; | |
134 | ||
135 | Will get turned into | |
136 | ||
137 | ptr2 = &x[constant/elementsize]; | |
138 | ||
139 | Or | |
140 | ||
141 | ptr = &x[0]; | |
142 | offset = index * element_size; | |
143 | offset_p = (pointer) offset; | |
144 | ptr2 = ptr + offset_p | |
145 | ||
146 | Will get turned into: | |
147 | ||
148 | ptr2 = &x[index]; | |
149 | ||
1c4607fd | 150 | Or |
151 | ssa = (int) decl | |
152 | res = ssa & 1 | |
153 | ||
154 | Provided that decl has known alignment >= 2, will get turned into | |
155 | ||
156 | res = 0 | |
157 | ||
8f628ee8 | 158 | We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to |
159 | allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent | |
160 | {NOT_EXPR,NEG_EXPR}. | |
291d763b | 161 | |
4ee9c684 | 162 | This will (of course) be extended as other needs arise. */ |
163 | ||
15ec875c | 164 | static bool forward_propagate_addr_expr (tree name, tree rhs); |
148aa112 | 165 | |
166 | /* Set to true if we delete EH edges during the optimization. */ | |
167 | static bool cfg_changed; | |
168 | ||
75a70cf9 | 169 | static tree rhs_to_tree (tree type, gimple stmt); |
148aa112 | 170 | |
83a20baf | 171 | /* Get the next statement we can propagate NAME's value into skipping |
5adc1066 | 172 | trivial copies. Returns the statement that is suitable as a |
173 | propagation destination or NULL_TREE if there is no such one. | |
174 | This only returns destinations in a single-use chain. FINAL_NAME_P | |
175 | if non-NULL is written to the ssa name that represents the use. */ | |
a3451973 | 176 | |
75a70cf9 | 177 | static gimple |
5adc1066 | 178 | get_prop_dest_stmt (tree name, tree *final_name_p) |
a3451973 | 179 | { |
5adc1066 | 180 | use_operand_p use; |
75a70cf9 | 181 | gimple use_stmt; |
a3451973 | 182 | |
5adc1066 | 183 | do { |
184 | /* If name has multiple uses, bail out. */ | |
185 | if (!single_imm_use (name, &use, &use_stmt)) | |
75a70cf9 | 186 | return NULL; |
a3451973 | 187 | |
5adc1066 | 188 | /* If this is not a trivial copy, we found it. */ |
8f0b877f | 189 | if (!gimple_assign_ssa_name_copy_p (use_stmt) |
75a70cf9 | 190 | || gimple_assign_rhs1 (use_stmt) != name) |
5adc1066 | 191 | break; |
192 | ||
193 | /* Continue searching uses of the copy destination. */ | |
75a70cf9 | 194 | name = gimple_assign_lhs (use_stmt); |
5adc1066 | 195 | } while (1); |
196 | ||
197 | if (final_name_p) | |
198 | *final_name_p = name; | |
199 | ||
200 | return use_stmt; | |
a3451973 | 201 | } |
202 | ||
5adc1066 | 203 | /* Get the statement we can propagate from into NAME skipping |
204 | trivial copies. Returns the statement which defines the | |
205 | propagation source or NULL_TREE if there is no such one. | |
206 | If SINGLE_USE_ONLY is set considers only sources which have | |
207 | a single use chain up to NAME. If SINGLE_USE_P is non-null, | |
208 | it is set to whether the chain to NAME is a single use chain | |
209 | or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */ | |
4ee9c684 | 210 | |
static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  /* Tracks whether the whole chain from the original NAME down to the
     returned definition consists of single-use names.  */
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
	tree rhs;

	/* Report single-use status before possibly looking through a
	   conversion; SINGLE_USE_P is not touched in single-use-only
	   mode (the caller already knows the answer is true then).  */
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	/* We can look through pointer conversions in the search
	   for a useful stmt for the comparison folding.  */
	rhs = gimple_assign_rhs1 (def_stmt);
	if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
	    && TREE_CODE (rhs) == SSA_NAME
	    && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
	    && POINTER_TYPE_P (TREE_TYPE (rhs)))
	  name = rhs;
	else
	  return def_stmt;
      }
    else
      {
	/* Continue searching the def of the copy source name.  */
	name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
e6dfde59 | 256 | |
5adc1066 | 257 | /* Checks if the destination ssa name in DEF_STMT can be used as |
258 | propagation source. Returns true if so, otherwise false. */ | |
e6dfde59 | 259 | |
5adc1066 | 260 | static bool |
75a70cf9 | 261 | can_propagate_from (gimple def_stmt) |
5adc1066 | 262 | { |
75a70cf9 | 263 | gcc_assert (is_gimple_assign (def_stmt)); |
8f0b877f | 264 | |
484b827b | 265 | /* If the rhs has side-effects we cannot propagate from it. */ |
75a70cf9 | 266 | if (gimple_has_volatile_ops (def_stmt)) |
484b827b | 267 | return false; |
268 | ||
269 | /* If the rhs is a load we cannot propagate from it. */ | |
75a70cf9 | 270 | if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference |
271 | || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration) | |
484b827b | 272 | return false; |
273 | ||
b9e98b8a | 274 | /* Constants can be always propagated. */ |
8f0b877f | 275 | if (gimple_assign_single_p (def_stmt) |
276 | && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))) | |
b9e98b8a | 277 | return true; |
278 | ||
75a70cf9 | 279 | /* We cannot propagate ssa names that occur in abnormal phi nodes. */ |
32cdcc42 | 280 | if (stmt_references_abnormal_ssa_name (def_stmt)) |
281 | return false; | |
4ee9c684 | 282 | |
5adc1066 | 283 | /* If the definition is a conversion of a pointer to a function type, |
75a70cf9 | 284 | then we can not apply optimizations as some targets require |
285 | function pointers to be canonicalized and in this case this | |
286 | optimization could eliminate a necessary canonicalization. */ | |
8f0b877f | 287 | if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))) |
75a70cf9 | 288 | { |
289 | tree rhs = gimple_assign_rhs1 (def_stmt); | |
290 | if (POINTER_TYPE_P (TREE_TYPE (rhs)) | |
291 | && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE) | |
292 | return false; | |
293 | } | |
8f0b877f | 294 | |
5adc1066 | 295 | return true; |
e6dfde59 | 296 | } |
297 | ||
5d2361b0 | 298 | /* Remove a copy chain ending in NAME along the defs. |
299 | If NAME was replaced in its only use then this function can be used | |
300 | to clean up dead stmts. Returns true if cleanup-cfg has to run. */ | |
8f628ee8 | 301 | |
static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  /* NOTE: this local deliberately shadows the file-scope cfg_changed;
     the caller decides what to do with the returned flag.  */
  bool cfg_changed = false;

  do {
    basic_block bb;

    /* Stop as soon as some other use of NAME remains.  */
    if (!has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    gsi = gsi_for_stmt (stmt);
    bb = gimple_bb (stmt);
    release_defs (stmt);
    gsi_remove (&gsi, true);
    /* Removing the stmt may have made EH edges out of BB dead.  */
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    /* STMT is still readable after gsi_remove; if it was a copy, walk
       up to its source and try to remove that definition as well.  */
    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
8f628ee8 | 327 | |
75a70cf9 | 328 | /* Return the rhs of a gimple_assign STMT in a form of a single tree, |
329 | converted to type TYPE. | |
48e1416a | 330 | |
75a70cf9 | 331 | This should disappear, but is needed so we can combine expressions and use |
332 | the fold() interfaces. Long term, we need to develop folding and combine | |
333 | routines that deal with gimple exclusively . */ | |
334 | ||
335 | static tree | |
336 | rhs_to_tree (tree type, gimple stmt) | |
337 | { | |
389dd41b | 338 | location_t loc = gimple_location (stmt); |
75a70cf9 | 339 | enum tree_code code = gimple_assign_rhs_code (stmt); |
57c45d70 | 340 | if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS) |
341 | return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt), | |
342 | gimple_assign_rhs2 (stmt), | |
343 | gimple_assign_rhs3 (stmt)); | |
344 | else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
389dd41b | 345 | return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt), |
fb8ed03f | 346 | gimple_assign_rhs2 (stmt)); |
75a70cf9 | 347 | else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) |
fb8ed03f | 348 | return build1 (code, type, gimple_assign_rhs1 (stmt)); |
75a70cf9 | 349 | else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) |
350 | return gimple_assign_rhs1 (stmt); | |
351 | else | |
352 | gcc_unreachable (); | |
353 | } | |
354 | ||
5adc1066 | 355 | /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns |
356 | the folded result in a form suitable for COND_EXPR_COND or | |
357 | NULL_TREE, if there is no suitable simplified form. If | |
358 | INVARIANT_ONLY is true only gimple_min_invariant results are | |
359 | considered simplified. */ | |
8f628ee8 | 360 | |
361 | static tree | |
389dd41b | 362 | combine_cond_expr_cond (location_t loc, enum tree_code code, tree type, |
5adc1066 | 363 | tree op0, tree op1, bool invariant_only) |
8f628ee8 | 364 | { |
5adc1066 | 365 | tree t; |
8f628ee8 | 366 | |
5adc1066 | 367 | gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison); |
8f628ee8 | 368 | |
389dd41b | 369 | t = fold_binary_loc (loc, code, type, op0, op1); |
5adc1066 | 370 | if (!t) |
371 | return NULL_TREE; | |
8f628ee8 | 372 | |
5adc1066 | 373 | /* Require that we got a boolean type out if we put one in. */ |
374 | gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type)); | |
8f628ee8 | 375 | |
a7392604 | 376 | /* Canonicalize the combined condition for use in a COND_EXPR. */ |
377 | t = canonicalize_cond_expr_cond (t); | |
8f628ee8 | 378 | |
5adc1066 | 379 | /* Bail out if we required an invariant but didn't get one. */ |
75a70cf9 | 380 | if (!t || (invariant_only && !is_gimple_min_invariant (t))) |
5adc1066 | 381 | return NULL_TREE; |
8f628ee8 | 382 | |
a7392604 | 383 | return t; |
8f628ee8 | 384 | } |
385 | ||
c8126d25 | 386 | /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements |
387 | of its operand. Return a new comparison tree or NULL_TREE if there | |
388 | were no simplifying combines. */ | |
389 | ||
static tree
forward_propagate_into_comparison_1 (location_t loc,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  /* rhs0/rhs1 remember the expanded forms of op0/op1 from the failed
     single-operand attempts so the combined attempt can reuse them.  */
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	  /* Require an invariant result unless the whole def chain has
	     a single use, to avoid growing the IL.  */
	  tmp = combine_cond_expr_cond (loc, code, type,
					rhs0, op1, !single_use0_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (loc, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (loc, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}
437 | ||
678b2f5b | 438 | /* Propagate from the ssa name definition statements of the assignment |
439 | from a comparison at *GSI into the conditional if that simplifies it. | |
440 | Returns true if the stmt was modified, false if not. */ | |
c8126d25 | 441 | |
678b2f5b | 442 | static bool |
443 | forward_propagate_into_comparison (gimple_stmt_iterator *gsi) | |
c8126d25 | 444 | { |
678b2f5b | 445 | gimple stmt = gsi_stmt (*gsi); |
446 | tree tmp; | |
c8126d25 | 447 | |
448 | /* Combine the comparison with defining statements. */ | |
678b2f5b | 449 | tmp = forward_propagate_into_comparison_1 (gimple_location (stmt), |
450 | gimple_assign_rhs_code (stmt), | |
451 | TREE_TYPE | |
452 | (gimple_assign_lhs (stmt)), | |
453 | gimple_assign_rhs1 (stmt), | |
454 | gimple_assign_rhs2 (stmt)); | |
455 | if (tmp) | |
c8126d25 | 456 | { |
678b2f5b | 457 | gimple_assign_set_rhs_from_tree (gsi, tmp); |
458 | update_stmt (stmt); | |
459 | return true; | |
c8126d25 | 460 | } |
461 | ||
678b2f5b | 462 | return false; |
c8126d25 | 463 | } |
464 | ||
5adc1066 | 465 | /* Propagate from the ssa name definition statements of COND_EXPR |
75a70cf9 | 466 | in GIMPLE_COND statement STMT into the conditional if that simplifies it. |
467 | Returns zero if no statement was changed, one if there were | |
468 | changes and two if cfg_cleanup needs to run. | |
48e1416a | 469 | |
75a70cf9 | 470 | This must be kept in sync with forward_propagate_into_cond. */ |
471 | ||
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  int did_something = 0;
  location_t loc = gimple_location (stmt);
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  /* GIMPLE_COND conditions always compare to a boolean result.  */
  tmp = forward_propagate_into_comparison_1 (loc, code,
					     boolean_type_node,
					     gimple_cond_lhs (stmt),
					     gimple_cond_rhs (stmt));
  if (tmp)
    {
      if (dump_file && tmp)
	{
	  /* Rebuild the old condition as a tree purely for dumping.  */
	  tree cond = build2 (gimple_cond_code (stmt),
			      boolean_type_node,
			      gimple_cond_lhs (stmt),
			      gimple_cond_rhs (stmt));
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      /* Unshare TMP: parts of it may still be linked into other stmts.  */
      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      /* Remove defining statements.  Return 2 when the condition became
	 invariant, signalling that cfg_cleanup has to run.  */
      if (is_gimple_min_invariant (tmp))
	did_something = 2;
      else if (did_something == 0)
	did_something = 1;
    }

  return did_something;
}
515 | ||
516 | ||
517 | /* Propagate from the ssa name definition statements of COND_EXPR | |
518 | in the rhs of statement STMT into the conditional if that simplifies it. | |
4c580c8c | 519 | Returns zero if no statement was changed, one if there were |
75a70cf9 | 520 | changes and two if cfg_cleanup needs to run. |
521 | ||
522 | This must be kept in sync with forward_propagate_into_gimple_cond. */ | |
4ee9c684 | 523 | |
static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  location_t loc = gimple_location (stmt);
  int did_something = 0;
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
					       boolean_type_node,
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      tree name = cond, rhs0;
      /* Only look through a single-use chain here; the definition must
	 be a source we are allowed to propagate from.  */
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return did_something;

      /* Treat the bare SSA name condition as NAME != 0 and try to fold
	 that against its definition.  */
      rhs0 = gimple_assign_rhs1 (def_stmt);
      tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
				    build_int_cst (TREE_TYPE (rhs0), 0),
				    false);
    }

  if (tmp)
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      /* Unshare TMP before installing it; re-fetch STMT because the
	 replacement may have allocated a new statement.  */
      gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      /* Remove defining statements.  Return 2 when the condition became
	 invariant, signalling that cfg_cleanup has to run.  */
      if (is_gimple_min_invariant (tmp))
	did_something = 2;
      else if (did_something == 0)
	did_something = 1;
    }

  return did_something;
}
576 | ||
48e1416a | 577 | /* We've just substituted an ADDR_EXPR into stmt. Update all the |
148aa112 | 578 | relevant data structures to match. */ |
579 | ||
580 | static void | |
75a70cf9 | 581 | tidy_after_forward_propagate_addr (gimple stmt) |
148aa112 | 582 | { |
148aa112 | 583 | /* We may have turned a trapping insn into a non-trapping insn. */ |
584 | if (maybe_clean_or_replace_eh_stmt (stmt, stmt) | |
75a70cf9 | 585 | && gimple_purge_dead_eh_edges (gimple_bb (stmt))) |
148aa112 | 586 | cfg_changed = true; |
f2fae51f | 587 | |
75a70cf9 | 588 | if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR) |
589 | recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt)); | |
148aa112 | 590 | } |
591 | ||
75a70cf9 | 592 | /* DEF_RHS contains the address of the 0th element in an array. |
6c01267c | 593 | USE_STMT uses type of DEF_RHS to compute the address of an |
291d763b | 594 | arbitrary element within the array. The (variable) byte offset |
595 | of the element is contained in OFFSET. | |
596 | ||
597 | We walk back through the use-def chains of OFFSET to verify that | |
598 | it is indeed computing the offset of an element within the array | |
599 | and extract the index corresponding to the given byte offset. | |
600 | ||
601 | We then try to fold the entire address expression into a form | |
602 | &array[index]. | |
603 | ||
604 | If we are successful, we replace the right hand side of USE_STMT | |
605 | with the new address computation. */ | |
606 | ||
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  /* TUNIT is the size in bytes of one array element; it must be a
     host-representable unsigned constant for the index math below.  */
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
	 must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	  && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	  && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
	{
	  /* The first operand to the MULT_EXPR is the desired index.  */
	  index = gimple_assign_rhs1 (offset_def);
	}
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
		|| gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
	       && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	       && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
						gimple_assign_rhs2 (offset_def),
						tunit)) != NULL_TREE)
	{
	  /* TMP is now CST (the constant element count added to or
	     subtracted from the variable index).  */
	  gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
	  if (is_gimple_assign (offset_def2)
	      && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
	    {
	      index = fold_build2 (gimple_assign_rhs_code (offset_def),
				   TREE_TYPE (offset),
				   gimple_assign_rhs1 (offset_def2), tmp);
	    }
	  else
	    return false;
	}
      else
	return false;
    }

  /* Replace the pointer addition with array indexing.  INDEX may be an
     arbitrary expression at this point; gimplify it before the use.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      /* &array[x] + ... becomes a copy with the index replaced.  */
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      /* &array + ... becomes &array[index], converted to the type of
	 the use's lhs when the address types do not agree.  */
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
			unshare_expr (TREE_OPERAND (def_rhs, 0)),
			index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
				      TREE_TYPE (new_rhs)))
	{
	  new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
					      NULL_TREE, true, GSI_SAME_STMT);
	  new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
				  new_rhs);
	}
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  /* Re-fetch the statement: the replacement may have allocated a new one.  */
  use_stmt = gsi_stmt (*use_stmt_gsi);

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
717 | ||
15ec875c | 718 | /* NAME is a SSA_NAME representing DEF_RHS which is of the form |
719 | ADDR_EXPR <whatever>. | |
291d763b | 720 | |
3d5cfe81 | 721 | Try to forward propagate the ADDR_EXPR into the use USE_STMT. |
291d763b | 722 | Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF |
3d5cfe81 | 723 | node or for recovery of array indexing from pointer arithmetic. |
75a70cf9 | 724 | |
6b5a5c42 | 725 | Return true if the propagation was successful (the propagation can |
726 | be not totally successful, yet things may have been changed). */ | |
291d763b | 727 | |
728 | static bool | |
75a70cf9 | 729 | forward_propagate_addr_expr_1 (tree name, tree def_rhs, |
730 | gimple_stmt_iterator *use_stmt_gsi, | |
6776dec8 | 731 | bool single_use_p) |
291d763b | 732 | { |
75a70cf9 | 733 | tree lhs, rhs, rhs2, array_ref; |
75a70cf9 | 734 | gimple use_stmt = gsi_stmt (*use_stmt_gsi); |
735 | enum tree_code rhs_code; | |
9e019299 | 736 | bool res = true; |
291d763b | 737 | |
971c637a | 738 | gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR); |
291d763b | 739 | |
75a70cf9 | 740 | lhs = gimple_assign_lhs (use_stmt); |
741 | rhs_code = gimple_assign_rhs_code (use_stmt); | |
742 | rhs = gimple_assign_rhs1 (use_stmt); | |
15ec875c | 743 | |
6776dec8 | 744 | /* Trivial cases. The use statement could be a trivial copy or a |
15ec875c | 745 | useless conversion. Recurse to the uses of the lhs as copyprop does |
971c637a | 746 | not copy through different variant pointers and FRE does not catch |
6776dec8 | 747 | all useless conversions. Treat the case of a single-use name and |
748 | a conversion to def_rhs type separate, though. */ | |
971c637a | 749 | if (TREE_CODE (lhs) == SSA_NAME |
75a70cf9 | 750 | && ((rhs_code == SSA_NAME && rhs == name) |
316616c9 | 751 | || CONVERT_EXPR_CODE_P (rhs_code))) |
6776dec8 | 752 | { |
316616c9 | 753 | /* Only recurse if we don't deal with a single use or we cannot |
754 | do the propagation to the current statement. In particular | |
755 | we can end up with a conversion needed for a non-invariant | |
756 | address which we cannot do in a single statement. */ | |
757 | if (!single_use_p | |
758 | || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)) | |
bd8d8d81 | 759 | && (!is_gimple_min_invariant (def_rhs) |
760 | || (INTEGRAL_TYPE_P (TREE_TYPE (lhs)) | |
761 | && POINTER_TYPE_P (TREE_TYPE (def_rhs)) | |
762 | && (TYPE_PRECISION (TREE_TYPE (lhs)) | |
763 | > TYPE_PRECISION (TREE_TYPE (def_rhs))))))) | |
971c637a | 764 | return forward_propagate_addr_expr (lhs, def_rhs); |
765 | ||
75a70cf9 | 766 | gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs)); |
316616c9 | 767 | if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))) |
768 | gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs)); | |
769 | else | |
770 | gimple_assign_set_rhs_code (use_stmt, NOP_EXPR); | |
6776dec8 | 771 | return true; |
772 | } | |
971c637a | 773 | |
182cf5a9 | 774 | /* Propagate through constant pointer adjustments. */ |
775 | if (TREE_CODE (lhs) == SSA_NAME | |
776 | && rhs_code == POINTER_PLUS_EXPR | |
777 | && rhs == name | |
778 | && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST) | |
779 | { | |
780 | tree new_def_rhs; | |
781 | /* As we come here with non-invariant addresses in def_rhs we need | |
782 | to make sure we can build a valid constant offsetted address | |
783 | for further propagation. Simply rely on fold building that | |
784 | and check after the fact. */ | |
785 | new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)), | |
786 | def_rhs, | |
787 | fold_convert (ptr_type_node, | |
788 | gimple_assign_rhs2 (use_stmt))); | |
789 | if (TREE_CODE (new_def_rhs) == MEM_REF | |
f5d03f27 | 790 | && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0))) |
182cf5a9 | 791 | return false; |
792 | new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs, | |
793 | TREE_TYPE (rhs)); | |
794 | ||
795 | /* Recurse. If we could propagate into all uses of lhs do not | |
796 | bother to replace into the current use but just pretend we did. */ | |
797 | if (TREE_CODE (new_def_rhs) == ADDR_EXPR | |
798 | && forward_propagate_addr_expr (lhs, new_def_rhs)) | |
799 | return true; | |
800 | ||
801 | if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs))) | |
802 | gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs), | |
803 | new_def_rhs, NULL_TREE); | |
804 | else if (is_gimple_min_invariant (new_def_rhs)) | |
805 | gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, | |
806 | new_def_rhs, NULL_TREE); | |
807 | else | |
808 | return false; | |
809 | gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt); | |
810 | update_stmt (use_stmt); | |
811 | return true; | |
812 | } | |
813 | ||
48e1416a | 814 | /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS. |
971c637a | 815 | ADDR_EXPR will not appear on the LHS. */ |
182cf5a9 | 816 | lhs = gimple_assign_lhs (use_stmt); |
817 | while (handled_component_p (lhs)) | |
818 | lhs = TREE_OPERAND (lhs, 0); | |
971c637a | 819 | |
182cf5a9 | 820 | /* Now see if the LHS node is a MEM_REF using NAME. If so, |
971c637a | 821 | propagate the ADDR_EXPR into the use of NAME and fold the result. */ |
182cf5a9 | 822 | if (TREE_CODE (lhs) == MEM_REF |
9e019299 | 823 | && TREE_OPERAND (lhs, 0) == name) |
971c637a | 824 | { |
182cf5a9 | 825 | tree def_rhs_base; |
826 | HOST_WIDE_INT def_rhs_offset; | |
827 | /* If the address is invariant we can always fold it. */ | |
828 | if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0), | |
829 | &def_rhs_offset))) | |
9e019299 | 830 | { |
182cf5a9 | 831 | double_int off = mem_ref_offset (lhs); |
832 | tree new_ptr; | |
833 | off = double_int_add (off, | |
834 | shwi_to_double_int (def_rhs_offset)); | |
835 | if (TREE_CODE (def_rhs_base) == MEM_REF) | |
836 | { | |
837 | off = double_int_add (off, mem_ref_offset (def_rhs_base)); | |
838 | new_ptr = TREE_OPERAND (def_rhs_base, 0); | |
839 | } | |
840 | else | |
841 | new_ptr = build_fold_addr_expr (def_rhs_base); | |
842 | TREE_OPERAND (lhs, 0) = new_ptr; | |
843 | TREE_OPERAND (lhs, 1) | |
844 | = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off); | |
9e019299 | 845 | tidy_after_forward_propagate_addr (use_stmt); |
9e019299 | 846 | /* Continue propagating into the RHS if this was not the only use. */ |
847 | if (single_use_p) | |
848 | return true; | |
849 | } | |
182cf5a9 | 850 | /* If the LHS is a plain dereference and the value type is the same as |
851 | that of the pointed-to type of the address we can put the | |
852 | dereferenced address on the LHS preserving the original alias-type. */ | |
853 | else if (gimple_assign_lhs (use_stmt) == lhs | |
854 | && useless_type_conversion_p | |
855 | (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), | |
856 | TREE_TYPE (gimple_assign_rhs1 (use_stmt)))) | |
857 | { | |
858 | tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0); | |
859 | tree new_offset, new_base, saved; | |
860 | while (handled_component_p (*def_rhs_basep)) | |
861 | def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0); | |
862 | saved = *def_rhs_basep; | |
863 | if (TREE_CODE (*def_rhs_basep) == MEM_REF) | |
864 | { | |
865 | new_base = TREE_OPERAND (*def_rhs_basep, 0); | |
866 | new_offset | |
867 | = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1), | |
317e2a67 | 868 | TREE_OPERAND (*def_rhs_basep, 1)); |
182cf5a9 | 869 | } |
870 | else | |
871 | { | |
872 | new_base = build_fold_addr_expr (*def_rhs_basep); | |
873 | new_offset = TREE_OPERAND (lhs, 1); | |
874 | } | |
875 | *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep), | |
876 | new_base, new_offset); | |
877 | gimple_assign_set_lhs (use_stmt, | |
878 | unshare_expr (TREE_OPERAND (def_rhs, 0))); | |
879 | *def_rhs_basep = saved; | |
880 | tidy_after_forward_propagate_addr (use_stmt); | |
881 | /* Continue propagating into the RHS if this was not the | |
882 | only use. */ | |
883 | if (single_use_p) | |
884 | return true; | |
885 | } | |
9e019299 | 886 | else |
887 | /* We can have a struct assignment dereferencing our name twice. | |
888 | Note that we didn't propagate into the lhs to not falsely | |
889 | claim we did when propagating into the rhs. */ | |
890 | res = false; | |
971c637a | 891 | } |
15ec875c | 892 | |
631d5db6 | 893 | /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR |
894 | nodes from the RHS. */ | |
182cf5a9 | 895 | rhs = gimple_assign_rhs1 (use_stmt); |
896 | if (TREE_CODE (rhs) == ADDR_EXPR) | |
897 | rhs = TREE_OPERAND (rhs, 0); | |
898 | while (handled_component_p (rhs)) | |
899 | rhs = TREE_OPERAND (rhs, 0); | |
291d763b | 900 | |
182cf5a9 | 901 | /* Now see if the RHS node is a MEM_REF using NAME. If so, |
291d763b | 902 | propagate the ADDR_EXPR into the use of NAME and fold the result. */ |
182cf5a9 | 903 | if (TREE_CODE (rhs) == MEM_REF |
904 | && TREE_OPERAND (rhs, 0) == name) | |
291d763b | 905 | { |
182cf5a9 | 906 | tree def_rhs_base; |
907 | HOST_WIDE_INT def_rhs_offset; | |
908 | if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0), | |
909 | &def_rhs_offset))) | |
910 | { | |
911 | double_int off = mem_ref_offset (rhs); | |
912 | tree new_ptr; | |
913 | off = double_int_add (off, | |
914 | shwi_to_double_int (def_rhs_offset)); | |
915 | if (TREE_CODE (def_rhs_base) == MEM_REF) | |
916 | { | |
917 | off = double_int_add (off, mem_ref_offset (def_rhs_base)); | |
918 | new_ptr = TREE_OPERAND (def_rhs_base, 0); | |
919 | } | |
920 | else | |
921 | new_ptr = build_fold_addr_expr (def_rhs_base); | |
922 | TREE_OPERAND (rhs, 0) = new_ptr; | |
923 | TREE_OPERAND (rhs, 1) | |
924 | = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off); | |
925 | fold_stmt_inplace (use_stmt); | |
926 | tidy_after_forward_propagate_addr (use_stmt); | |
927 | return res; | |
928 | } | |
929 | /* If the LHS is a plain dereference and the value type is the same as | |
930 | that of the pointed-to type of the address we can put the | |
931 | dereferenced address on the LHS preserving the original alias-type. */ | |
932 | else if (gimple_assign_rhs1 (use_stmt) == rhs | |
933 | && useless_type_conversion_p | |
934 | (TREE_TYPE (gimple_assign_lhs (use_stmt)), | |
935 | TREE_TYPE (TREE_OPERAND (def_rhs, 0)))) | |
936 | { | |
937 | tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0); | |
938 | tree new_offset, new_base, saved; | |
939 | while (handled_component_p (*def_rhs_basep)) | |
940 | def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0); | |
941 | saved = *def_rhs_basep; | |
942 | if (TREE_CODE (*def_rhs_basep) == MEM_REF) | |
943 | { | |
944 | new_base = TREE_OPERAND (*def_rhs_basep, 0); | |
945 | new_offset | |
946 | = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1), | |
317e2a67 | 947 | TREE_OPERAND (*def_rhs_basep, 1)); |
182cf5a9 | 948 | } |
949 | else | |
950 | { | |
951 | new_base = build_fold_addr_expr (*def_rhs_basep); | |
952 | new_offset = TREE_OPERAND (rhs, 1); | |
953 | } | |
954 | *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep), | |
955 | new_base, new_offset); | |
956 | gimple_assign_set_rhs1 (use_stmt, | |
957 | unshare_expr (TREE_OPERAND (def_rhs, 0))); | |
958 | *def_rhs_basep = saved; | |
959 | fold_stmt_inplace (use_stmt); | |
960 | tidy_after_forward_propagate_addr (use_stmt); | |
961 | return res; | |
962 | } | |
291d763b | 963 | } |
964 | ||
971c637a | 965 | /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there |
966 | is nothing to do. */ | |
75a70cf9 | 967 | if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR |
968 | || gimple_assign_rhs1 (use_stmt) != name) | |
971c637a | 969 | return false; |
970 | ||
291d763b | 971 | /* The remaining cases are all for turning pointer arithmetic into |
972 | array indexing. They only apply when we have the address of | |
973 | element zero in an array. If that is not the case then there | |
974 | is nothing to do. */ | |
15ec875c | 975 | array_ref = TREE_OPERAND (def_rhs, 0); |
182cf5a9 | 976 | if ((TREE_CODE (array_ref) != ARRAY_REF |
977 | || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE | |
978 | || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST) | |
979 | && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE) | |
291d763b | 980 | return false; |
981 | ||
75a70cf9 | 982 | rhs2 = gimple_assign_rhs2 (use_stmt); |
088cc5d5 | 983 | /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size |
984 | of the elements in X into &x[C1 + C2/element size]. */ | |
75a70cf9 | 985 | if (TREE_CODE (rhs2) == INTEGER_CST) |
291d763b | 986 | { |
e60a6f7b | 987 | tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt), |
988 | TREE_TYPE (def_rhs), | |
088cc5d5 | 989 | def_rhs, rhs2); |
75a70cf9 | 990 | if (new_rhs) |
291d763b | 991 | { |
7b705d94 | 992 | tree type = TREE_TYPE (gimple_assign_lhs (use_stmt)); |
993 | new_rhs = unshare_expr (new_rhs); | |
994 | if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs))) | |
995 | { | |
996 | if (!is_gimple_min_invariant (new_rhs)) | |
997 | new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, | |
998 | true, NULL_TREE, | |
999 | true, GSI_SAME_STMT); | |
1000 | new_rhs = fold_convert (type, new_rhs); | |
1001 | } | |
1002 | gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs); | |
75a70cf9 | 1003 | use_stmt = gsi_stmt (*use_stmt_gsi); |
1004 | update_stmt (use_stmt); | |
148aa112 | 1005 | tidy_after_forward_propagate_addr (use_stmt); |
291d763b | 1006 | return true; |
1007 | } | |
291d763b | 1008 | } |
1009 | ||
0de36bdb | 1010 | /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by |
291d763b | 1011 | converting a multiplication of an index by the size of the |
1012 | array elements, then the result is converted into the proper | |
1013 | type for the arithmetic. */ | |
75a70cf9 | 1014 | if (TREE_CODE (rhs2) == SSA_NAME |
182cf5a9 | 1015 | && (TREE_CODE (array_ref) != ARRAY_REF |
1016 | || integer_zerop (TREE_OPERAND (array_ref, 1))) | |
c019af4d | 1017 | && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs)) |
291d763b | 1018 | /* Avoid problems with IVopts creating PLUS_EXPRs with a |
1019 | different type than their operands. */ | |
83a99d39 | 1020 | && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))) |
75a70cf9 | 1021 | return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs, |
1022 | use_stmt_gsi); | |
291d763b | 1023 | return false; |
1024 | } | |
1025 | ||
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   NAME is the SSA_NAME being propagated; RHS is the ADDR_EXPR on the
   defining statement's right-hand side.

   Returns true, if all uses have been propagated into (i.e. NAME ended
   up with zero remaining uses).  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  /* Loop depth of NAME's definition; used below to avoid propagating
     non-invariant addresses into deeper loop nests.  */
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  Debug statements do not count
	 as failed propagations.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate non-invariant ADDR_EXPRs into the loop as that
	 is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
	  && !is_gimple_min_invariant (rhs))
	{
	  all = false;
	  continue;
	}

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* If the use has moved to a different statement adjust
	   the update machinery for the old statement too.  */
	if (use_stmt != gsi_stmt (gsi))
	  {
	    update_stmt (use_stmt);
	    use_stmt = gsi_stmt (gsi);
	  }

	update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  /* Success only if every use was propagated into and NAME is dead.  */
  return all && has_zero_uses (name);
}
1097 | ||
678b2f5b | 1098 | |
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.
     get_prop_dest_stmt also updates NAME to the copy destination.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	     == tcc_comparison
	  || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
	{
	  /* Avoid using fold here as that may create a COND_EXPR with
	     non-boolean condition as canonical form.  */
	  tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
			gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
	}
      /* We can propagate the condition into X op CST where op
	 is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	       == tcc_comparison
	       && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
	{
	  enum tree_code code = gimple_assign_rhs_code (use_stmt);
	  tree cst = gimple_assign_rhs2 (use_stmt);
	  tree cond;

	  cond = build2 (gimple_assign_rhs_code (stmt),
			 TREE_TYPE (cst),
			 gimple_assign_rhs1 (stmt),
			 gimple_assign_rhs2 (stmt));

	  /* Let the simplifier decide whether (x CMP y) op CST folds;
	     bail out if it cannot.  */
	  tmp = combine_cond_expr_cond (gimple_location (use_stmt),
					code, TREE_TYPE (lhs),
					cond, cst, false);
	  if (tmp == NULL_TREE)
	    return false;
	}
      /* We can propagate the condition into a statement that
	 computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  /* Inverting a float comparison is only valid when NaNs cannot
	     make the inverse code non-equivalent.  */
	  bool nans = HONOR_NANS (TYPE_MODE (type));
	  enum tree_code code;
	  code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
	  if (code == ERROR_MARK)
	    return false;

	  tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
	}
      else
	return false;

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
	use_stmt = gsi_stmt (gsi);
	update_stmt (use_stmt);
      }

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
				      stmt);
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, old_rhs, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      /* Remove defining statements.  */
      return remove_prop_source_from_use (name);
    }

  return false;
}
1206 | ||
1207 | ||
3a938499 | 1208 | /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y. |
1209 | If so, we can change STMT into lhs = y which can later be copy | |
48e1416a | 1210 | propagated. Similarly for negation. |
3a938499 | 1211 | |
48e1416a | 1212 | This could trivially be formulated as a forward propagation |
3a938499 | 1213 | to immediate uses. However, we already had an implementation |
1214 | from DOM which used backward propagation via the use-def links. | |
1215 | ||
1216 | It turns out that backward propagation is actually faster as | |
1217 | there's less work to do for each NOT/NEG expression we find. | |
1218 | Backwards propagation needs to look at the statement in a single | |
1219 | backlink. Forward propagation needs to look at potentially more | |
678b2f5b | 1220 | than one forward link. |
3a938499 | 1221 | |
678b2f5b | 1222 | Returns true when the statement was changed. */ |
1223 | ||
1224 | static bool | |
75a70cf9 | 1225 | simplify_not_neg_expr (gimple_stmt_iterator *gsi_p) |
3a938499 | 1226 | { |
75a70cf9 | 1227 | gimple stmt = gsi_stmt (*gsi_p); |
1228 | tree rhs = gimple_assign_rhs1 (stmt); | |
1229 | gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs); | |
3a938499 | 1230 | |
1231 | /* See if the RHS_DEF_STMT has the same form as our statement. */ | |
75a70cf9 | 1232 | if (is_gimple_assign (rhs_def_stmt) |
1233 | && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt)) | |
3a938499 | 1234 | { |
75a70cf9 | 1235 | tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt); |
3a938499 | 1236 | |
1237 | /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */ | |
1238 | if (TREE_CODE (rhs_def_operand) == SSA_NAME | |
1239 | && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand)) | |
1240 | { | |
75a70cf9 | 1241 | gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand); |
1242 | stmt = gsi_stmt (*gsi_p); | |
3a938499 | 1243 | update_stmt (stmt); |
678b2f5b | 1244 | return true; |
3a938499 | 1245 | } |
1246 | } | |
678b2f5b | 1247 | |
1248 | return false; | |
3a938499 | 1249 | } |
3d5cfe81 | 1250 | |
b5860aba | 1251 | /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of |
1252 | the condition which we may be able to optimize better. */ | |
1253 | ||
678b2f5b | 1254 | static bool |
75a70cf9 | 1255 | simplify_gimple_switch (gimple stmt) |
b5860aba | 1256 | { |
75a70cf9 | 1257 | tree cond = gimple_switch_index (stmt); |
b5860aba | 1258 | tree def, to, ti; |
75a70cf9 | 1259 | gimple def_stmt; |
b5860aba | 1260 | |
1261 | /* The optimization that we really care about is removing unnecessary | |
1262 | casts. That will let us do much better in propagating the inferred | |
1263 | constant at the switch target. */ | |
1264 | if (TREE_CODE (cond) == SSA_NAME) | |
1265 | { | |
75a70cf9 | 1266 | def_stmt = SSA_NAME_DEF_STMT (cond); |
1267 | if (is_gimple_assign (def_stmt)) | |
b5860aba | 1268 | { |
75a70cf9 | 1269 | if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR) |
b5860aba | 1270 | { |
1271 | int need_precision; | |
1272 | bool fail; | |
1273 | ||
75a70cf9 | 1274 | def = gimple_assign_rhs1 (def_stmt); |
b5860aba | 1275 | |
b5860aba | 1276 | /* ??? Why was Jeff testing this? We are gimple... */ |
1b4345f7 | 1277 | gcc_checking_assert (is_gimple_val (def)); |
b5860aba | 1278 | |
1279 | to = TREE_TYPE (cond); | |
1280 | ti = TREE_TYPE (def); | |
1281 | ||
1282 | /* If we have an extension that preserves value, then we | |
1283 | can copy the source value into the switch. */ | |
1284 | ||
1285 | need_precision = TYPE_PRECISION (ti); | |
1286 | fail = false; | |
c5237b8b | 1287 | if (! INTEGRAL_TYPE_P (ti)) |
1288 | fail = true; | |
1289 | else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti)) | |
b5860aba | 1290 | fail = true; |
1291 | else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti)) | |
1292 | need_precision += 1; | |
1293 | if (TYPE_PRECISION (to) < need_precision) | |
1294 | fail = true; | |
1295 | ||
1296 | if (!fail) | |
1297 | { | |
75a70cf9 | 1298 | gimple_switch_set_index (stmt, def); |
b5860aba | 1299 | update_stmt (stmt); |
678b2f5b | 1300 | return true; |
b5860aba | 1301 | } |
1302 | } | |
1303 | } | |
1304 | } | |
678b2f5b | 1305 | |
1306 | return false; | |
b5860aba | 1307 | } |
1308 | ||
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.

   Each pointer is walked backwards through ADDR_EXPRs, copies,
   conversions and POINTER_PLUS_EXPRs with constant offsets,
   recording up to CPD_ITERATIONS (base, accumulated offset) pairs.
   If both chains share a base expression, the difference of the
   accumulated offsets is the pointer difference.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  /* exps[i][j] is the j-th base expression seen while walking pointer i;
     offs[i][j] is the constant byte offset accumulated up to that base.
     Index 0 walks P2, index 1 walks P1 (see the `i ? p1 : p2' below),
     so a match yields offs[0] - offs[1] == p2 - p1.  */
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      /* Fold a constant component path into the offset.  */
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  /* &MEM[ssa + cst]: keep walking from the SSA pointer
		     after accounting for the MEM_REF offset.  */
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    double_int_to_tree (sizetype,
							mem_ref_offset (q)));
		}
	      else
		{
		  /* A non-SSA base terminates this chain.  */
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  /* Record the SSA name itself as a potential common base.  */
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      /* Only constant pointer adjustments can be accumulated.  */
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || code == NOP_EXPR)
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  /* Look for a base expression common to both chains.  */
  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
1396 | ||
1397 | /* *GSI_P is a GIMPLE_CALL to a builtin function. | |
1398 | Optimize | |
1399 | memcpy (p, "abcd", 4); | |
1400 | memset (p + 4, ' ', 3); | |
1401 | into | |
1402 | memcpy (p, "abcd ", 7); | |
1403 | call if the latter can be stored by pieces during expansion. */ | |
1404 | ||
1405 | static bool | |
1406 | simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2) | |
1407 | { | |
1408 | gimple stmt1, stmt2 = gsi_stmt (*gsi_p); | |
1409 | tree vuse = gimple_vuse (stmt2); | |
1410 | if (vuse == NULL) | |
1411 | return false; | |
1412 | stmt1 = SSA_NAME_DEF_STMT (vuse); | |
1413 | ||
1414 | switch (DECL_FUNCTION_CODE (callee2)) | |
1415 | { | |
1416 | case BUILT_IN_MEMSET: | |
1417 | if (gimple_call_num_args (stmt2) != 3 | |
1418 | || gimple_call_lhs (stmt2) | |
1419 | || CHAR_BIT != 8 | |
1420 | || BITS_PER_UNIT != 8) | |
1421 | break; | |
1422 | else | |
1423 | { | |
1424 | tree callee1; | |
1425 | tree ptr1, src1, str1, off1, len1, lhs1; | |
1426 | tree ptr2 = gimple_call_arg (stmt2, 0); | |
1427 | tree val2 = gimple_call_arg (stmt2, 1); | |
1428 | tree len2 = gimple_call_arg (stmt2, 2); | |
1429 | tree diff, vdef, new_str_cst; | |
1430 | gimple use_stmt; | |
1431 | unsigned int ptr1_align; | |
1432 | unsigned HOST_WIDE_INT src_len; | |
1433 | char *src_buf; | |
1434 | use_operand_p use_p; | |
1435 | ||
1436 | if (!host_integerp (val2, 0) | |
1437 | || !host_integerp (len2, 1)) | |
1438 | break; | |
1439 | if (is_gimple_call (stmt1)) | |
1440 | { | |
1441 | /* If first stmt is a call, it needs to be memcpy | |
1442 | or mempcpy, with string literal as second argument and | |
1443 | constant length. */ | |
1444 | callee1 = gimple_call_fndecl (stmt1); | |
1445 | if (callee1 == NULL_TREE | |
1446 | || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL | |
1447 | || gimple_call_num_args (stmt1) != 3) | |
1448 | break; | |
1449 | if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY | |
1450 | && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY) | |
1451 | break; | |
1452 | ptr1 = gimple_call_arg (stmt1, 0); | |
1453 | src1 = gimple_call_arg (stmt1, 1); | |
1454 | len1 = gimple_call_arg (stmt1, 2); | |
1455 | lhs1 = gimple_call_lhs (stmt1); | |
1456 | if (!host_integerp (len1, 1)) | |
1457 | break; | |
1458 | str1 = string_constant (src1, &off1); | |
1459 | if (str1 == NULL_TREE) | |
1460 | break; | |
1461 | if (!host_integerp (off1, 1) | |
1462 | || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0 | |
1463 | || compare_tree_int (len1, TREE_STRING_LENGTH (str1) | |
1464 | - tree_low_cst (off1, 1)) > 0 | |
1465 | || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE | |
1466 | || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1))) | |
1467 | != TYPE_MODE (char_type_node)) | |
1468 | break; | |
1469 | } | |
1470 | else if (gimple_assign_single_p (stmt1)) | |
1471 | { | |
1472 | /* Otherwise look for length 1 memcpy optimized into | |
1473 | assignment. */ | |
1474 | ptr1 = gimple_assign_lhs (stmt1); | |
1475 | src1 = gimple_assign_rhs1 (stmt1); | |
1476 | if (TREE_CODE (ptr1) != MEM_REF | |
1477 | || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node) | |
1478 | || !host_integerp (src1, 0)) | |
1479 | break; | |
1480 | ptr1 = build_fold_addr_expr (ptr1); | |
1481 | callee1 = NULL_TREE; | |
1482 | len1 = size_one_node; | |
1483 | lhs1 = NULL_TREE; | |
1484 | off1 = size_zero_node; | |
1485 | str1 = NULL_TREE; | |
1486 | } | |
1487 | else | |
1488 | break; | |
1489 | ||
1490 | diff = constant_pointer_difference (ptr1, ptr2); | |
1491 | if (diff == NULL && lhs1 != NULL) | |
1492 | { | |
1493 | diff = constant_pointer_difference (lhs1, ptr2); | |
1494 | if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY | |
1495 | && diff != NULL) | |
1496 | diff = size_binop (PLUS_EXPR, diff, | |
1497 | fold_convert (sizetype, len1)); | |
1498 | } | |
1499 | /* If the difference between the second and first destination pointer | |
1500 | is not constant, or is bigger than memcpy length, bail out. */ | |
1501 | if (diff == NULL | |
1502 | || !host_integerp (diff, 1) | |
1503 | || tree_int_cst_lt (len1, diff)) | |
1504 | break; | |
1505 | ||
1506 | /* Use maximum of difference plus memset length and memcpy length | |
1507 | as the new memcpy length, if it is too big, bail out. */ | |
1508 | src_len = tree_low_cst (diff, 1); | |
1509 | src_len += tree_low_cst (len2, 1); | |
1510 | if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1)) | |
1511 | src_len = tree_low_cst (len1, 1); | |
1512 | if (src_len > 1024) | |
1513 | break; | |
1514 | ||
1515 | /* If mempcpy value is used elsewhere, bail out, as mempcpy | |
1516 | with bigger length will return different result. */ | |
1517 | if (lhs1 != NULL_TREE | |
1518 | && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY | |
1519 | && (TREE_CODE (lhs1) != SSA_NAME | |
1520 | || !single_imm_use (lhs1, &use_p, &use_stmt) | |
1521 | || use_stmt != stmt2)) | |
1522 | break; | |
1523 | ||
1524 | /* If anything reads memory in between memcpy and memset | |
1525 | call, the modified memcpy call might change it. */ | |
1526 | vdef = gimple_vdef (stmt1); | |
1527 | if (vdef != NULL | |
1528 | && (!single_imm_use (vdef, &use_p, &use_stmt) | |
1529 | || use_stmt != stmt2)) | |
1530 | break; | |
1531 | ||
1532 | ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT); | |
1533 | /* Construct the new source string literal. */ | |
1534 | src_buf = XALLOCAVEC (char, src_len + 1); | |
1535 | if (callee1) | |
1536 | memcpy (src_buf, | |
1537 | TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1), | |
1538 | tree_low_cst (len1, 1)); | |
1539 | else | |
1540 | src_buf[0] = tree_low_cst (src1, 0); | |
1541 | memset (src_buf + tree_low_cst (diff, 1), | |
1542 | tree_low_cst (val2, 1), tree_low_cst (len2, 1)); | |
1543 | src_buf[src_len] = '\0'; | |
1544 | /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str | |
1545 | handle embedded '\0's. */ | |
1546 | if (strlen (src_buf) != src_len) | |
1547 | break; | |
1548 | rtl_profile_for_bb (gimple_bb (stmt2)); | |
1549 | /* If the new memcpy wouldn't be emitted by storing the literal | |
1550 | by pieces, this optimization might enlarge .rodata too much, | |
1551 | as commonly used string literals couldn't be shared any | |
1552 | longer. */ | |
1553 | if (!can_store_by_pieces (src_len, | |
1554 | builtin_strncpy_read_str, | |
1555 | src_buf, ptr1_align, false)) | |
1556 | break; | |
1557 | ||
1558 | new_str_cst = build_string_literal (src_len, src_buf); | |
1559 | if (callee1) | |
1560 | { | |
1561 | /* If STMT1 is a mem{,p}cpy call, adjust it and remove | |
1562 | memset call. */ | |
1563 | if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY) | |
1564 | gimple_call_set_lhs (stmt1, NULL_TREE); | |
1565 | gimple_call_set_arg (stmt1, 1, new_str_cst); | |
1566 | gimple_call_set_arg (stmt1, 2, | |
1567 | build_int_cst (TREE_TYPE (len1), src_len)); | |
1568 | update_stmt (stmt1); | |
1569 | unlink_stmt_vdef (stmt2); | |
1570 | gsi_remove (gsi_p, true); | |
1571 | release_defs (stmt2); | |
1572 | if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY) | |
1573 | release_ssa_name (lhs1); | |
1574 | return true; | |
1575 | } | |
1576 | else | |
1577 | { | |
1578 | /* Otherwise, if STMT1 is length 1 memcpy optimized into | |
1579 | assignment, remove STMT1 and change memset call into | |
1580 | memcpy call. */ | |
1581 | gimple_stmt_iterator gsi = gsi_for_stmt (stmt1); | |
1582 | ||
7ecb2e7c | 1583 | if (!is_gimple_val (ptr1)) |
1584 | ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE, | |
1585 | true, GSI_SAME_STMT); | |
27f931ff | 1586 | gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]); |
1587 | gimple_call_set_arg (stmt2, 0, ptr1); | |
1588 | gimple_call_set_arg (stmt2, 1, new_str_cst); | |
1589 | gimple_call_set_arg (stmt2, 2, | |
1590 | build_int_cst (TREE_TYPE (len2), src_len)); | |
1591 | unlink_stmt_vdef (stmt1); | |
1592 | gsi_remove (&gsi, true); | |
1593 | release_defs (stmt1); | |
1594 | update_stmt (stmt2); | |
1595 | return false; | |
1596 | } | |
1597 | } | |
1598 | break; | |
1599 | default: | |
1600 | break; | |
1601 | } | |
1602 | return false; | |
1603 | } | |
1604 | ||
300da094 | 1605 | /* Simplify bitwise binary operations. |
1606 | Return true if a transformation applied, otherwise return false. */ | |
1c4607fd | 1607 | |
300da094 | 1608 | static bool |
1609 | simplify_bitwise_binary (gimple_stmt_iterator *gsi) | |
1c4607fd | 1610 | { |
300da094 | 1611 | gimple stmt = gsi_stmt (*gsi); |
1c4607fd | 1612 | tree arg1 = gimple_assign_rhs1 (stmt); |
1613 | tree arg2 = gimple_assign_rhs2 (stmt); | |
300da094 | 1614 | enum tree_code code = gimple_assign_rhs_code (stmt); |
1615 | tree res; | |
26f54bd0 | 1616 | gimple def1 = NULL, def2 = NULL; |
1617 | tree def1_arg1, def2_arg1; | |
1618 | enum tree_code def1_code, def2_code; | |
1c4607fd | 1619 | |
300da094 | 1620 | /* If the first argument is an SSA name that is itself a result of a |
1621 | typecast of an ADDR_EXPR to an integer, feed the ADDR_EXPR to the | |
1622 | folder rather than the ssa name. */ | |
1623 | if (code == BIT_AND_EXPR | |
1624 | && TREE_CODE (arg2) == INTEGER_CST | |
1625 | && TREE_CODE (arg1) == SSA_NAME) | |
1c4607fd | 1626 | { |
1627 | gimple def = SSA_NAME_DEF_STMT (arg1); | |
300da094 | 1628 | tree op = arg1; |
1c4607fd | 1629 | |
300da094 | 1630 | /* ??? This looks bogus - the conversion could be truncating. */ |
1631 | if (is_gimple_assign (def) | |
1632 | && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)) | |
1633 | && INTEGRAL_TYPE_P (TREE_TYPE (arg1))) | |
1c4607fd | 1634 | { |
300da094 | 1635 | tree opp = gimple_assign_rhs1 (def); |
1636 | if (TREE_CODE (opp) == ADDR_EXPR) | |
1637 | op = opp; | |
1638 | } | |
1c4607fd | 1639 | |
300da094 | 1640 | res = fold_binary_loc (gimple_location (stmt), |
1641 | BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)), | |
1642 | op, arg2); | |
1643 | if (res && is_gimple_min_invariant (res)) | |
1644 | { | |
1645 | gimple_assign_set_rhs_from_tree (gsi, res); | |
1646 | update_stmt (stmt); | |
1647 | return true; | |
1c4607fd | 1648 | } |
1649 | } | |
1650 | ||
26f54bd0 | 1651 | def1_code = TREE_CODE (arg1); |
1652 | def1_arg1 = arg1; | |
1653 | if (TREE_CODE (arg1) == SSA_NAME) | |
1654 | { | |
1655 | def1 = SSA_NAME_DEF_STMT (arg1); | |
1656 | if (is_gimple_assign (def1)) | |
1657 | { | |
1658 | def1_code = gimple_assign_rhs_code (def1); | |
1659 | def1_arg1 = gimple_assign_rhs1 (def1); | |
1660 | } | |
1661 | } | |
1662 | ||
1663 | def2_code = TREE_CODE (arg2); | |
1664 | def2_arg1 = arg2; | |
1665 | if (TREE_CODE (arg2) == SSA_NAME) | |
1666 | { | |
1667 | def2 = SSA_NAME_DEF_STMT (arg2); | |
1668 | if (is_gimple_assign (def2)) | |
1669 | { | |
1670 | def2_code = gimple_assign_rhs_code (def2); | |
1671 | def2_arg1 = gimple_assign_rhs1 (def2); | |
1672 | } | |
1673 | } | |
1674 | ||
25ce0d90 | 1675 | /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */ |
1676 | if (TREE_CODE (arg2) == INTEGER_CST | |
1677 | && CONVERT_EXPR_CODE_P (def1_code) | |
105fc895 | 1678 | && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1)) |
25ce0d90 | 1679 | && int_fits_type_p (arg2, TREE_TYPE (def1_arg1))) |
1680 | { | |
1681 | gimple newop; | |
1682 | tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL); | |
1683 | newop = | |
1684 | gimple_build_assign_with_ops (code, tem, def1_arg1, | |
1685 | fold_convert_loc (gimple_location (stmt), | |
1686 | TREE_TYPE (def1_arg1), | |
1687 | arg2)); | |
1688 | tem = make_ssa_name (tem, newop); | |
1689 | gimple_assign_set_lhs (newop, tem); | |
1690 | gsi_insert_before (gsi, newop, GSI_SAME_STMT); | |
1691 | gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR, | |
1692 | tem, NULL_TREE, NULL_TREE); | |
1693 | update_stmt (gsi_stmt (*gsi)); | |
1694 | return true; | |
1695 | } | |
1696 | ||
300da094 | 1697 | /* For bitwise binary operations apply operand conversions to the |
1698 | binary operation result instead of to the operands. This allows | |
1699 | to combine successive conversions and bitwise binary operations. */ | |
26f54bd0 | 1700 | if (CONVERT_EXPR_CODE_P (def1_code) |
1701 | && CONVERT_EXPR_CODE_P (def2_code) | |
1702 | && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1)) | |
25ce0d90 | 1703 | /* Make sure that the conversion widens the operands, or has same |
1704 | precision, or that it changes the operation to a bitfield | |
1705 | precision. */ | |
26f54bd0 | 1706 | && ((TYPE_PRECISION (TREE_TYPE (def1_arg1)) |
25ce0d90 | 1707 | <= TYPE_PRECISION (TREE_TYPE (arg1))) |
26f54bd0 | 1708 | || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1))) |
1709 | != MODE_INT) | |
1710 | || (TYPE_PRECISION (TREE_TYPE (arg1)) | |
1711 | != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1)))))) | |
1c4607fd | 1712 | { |
26f54bd0 | 1713 | gimple newop; |
1714 | tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), | |
1715 | NULL); | |
1716 | newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1); | |
1717 | tem = make_ssa_name (tem, newop); | |
1718 | gimple_assign_set_lhs (newop, tem); | |
1719 | gsi_insert_before (gsi, newop, GSI_SAME_STMT); | |
1720 | gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR, | |
1721 | tem, NULL_TREE, NULL_TREE); | |
1722 | update_stmt (gsi_stmt (*gsi)); | |
1723 | return true; | |
1724 | } | |
1725 | ||
1726 | /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */ | |
1727 | if (code == BIT_AND_EXPR | |
1728 | && def1_code == BIT_IOR_EXPR | |
1729 | && TREE_CODE (arg2) == INTEGER_CST | |
1730 | && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST) | |
1731 | { | |
1732 | tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2), | |
1733 | arg2, gimple_assign_rhs2 (def1)); | |
1734 | tree tem; | |
1735 | gimple newop; | |
1736 | if (integer_zerop (cst)) | |
300da094 | 1737 | { |
26f54bd0 | 1738 | gimple_assign_set_rhs1 (stmt, def1_arg1); |
1739 | update_stmt (stmt); | |
1740 | return true; | |
300da094 | 1741 | } |
26f54bd0 | 1742 | tem = create_tmp_reg (TREE_TYPE (arg2), NULL); |
1743 | newop = gimple_build_assign_with_ops (BIT_AND_EXPR, | |
1744 | tem, def1_arg1, arg2); | |
1745 | tem = make_ssa_name (tem, newop); | |
1746 | gimple_assign_set_lhs (newop, tem); | |
1747 | /* Make sure to re-process the new stmt as it's walking upwards. */ | |
1748 | gsi_insert_before (gsi, newop, GSI_NEW_STMT); | |
1749 | gimple_assign_set_rhs1 (stmt, tem); | |
1750 | gimple_assign_set_rhs2 (stmt, cst); | |
1751 | gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR); | |
1752 | update_stmt (stmt); | |
1753 | return true; | |
1754 | } | |
1755 | ||
1756 | /* Combine successive equal operations with constants. */ | |
1757 | if ((code == BIT_AND_EXPR | |
1758 | || code == BIT_IOR_EXPR | |
1759 | || code == BIT_XOR_EXPR) | |
1760 | && def1_code == code | |
1761 | && TREE_CODE (arg2) == INTEGER_CST | |
1762 | && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST) | |
1763 | { | |
1764 | tree cst = fold_build2 (code, TREE_TYPE (arg2), | |
1765 | arg2, gimple_assign_rhs2 (def1)); | |
1766 | gimple_assign_set_rhs1 (stmt, def1_arg1); | |
1767 | gimple_assign_set_rhs2 (stmt, cst); | |
1768 | update_stmt (stmt); | |
1769 | return true; | |
1c4607fd | 1770 | } |
300da094 | 1771 | |
1772 | return false; | |
1c4607fd | 1773 | } |
1774 | ||
ca3c9092 | 1775 | |
1776 | /* Perform re-associations of the plus or minus statement STMT that are | |
b69d1cb6 | 1777 | always permitted. Returns true if the CFG was changed. */ |
ca3c9092 | 1778 | |
b69d1cb6 | 1779 | static bool |
ca3c9092 | 1780 | associate_plusminus (gimple stmt) |
1781 | { | |
1782 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
1783 | tree rhs2 = gimple_assign_rhs2 (stmt); | |
1784 | enum tree_code code = gimple_assign_rhs_code (stmt); | |
1785 | gimple_stmt_iterator gsi; | |
1786 | bool changed; | |
1787 | ||
1788 | /* We can't reassociate at all for saturating types. */ | |
1789 | if (TYPE_SATURATING (TREE_TYPE (rhs1))) | |
b69d1cb6 | 1790 | return false; |
ca3c9092 | 1791 | |
1792 | /* First contract negates. */ | |
1793 | do | |
1794 | { | |
1795 | changed = false; | |
1796 | ||
1797 | /* A +- (-B) -> A -+ B. */ | |
1798 | if (TREE_CODE (rhs2) == SSA_NAME) | |
1799 | { | |
1800 | gimple def_stmt = SSA_NAME_DEF_STMT (rhs2); | |
1801 | if (is_gimple_assign (def_stmt) | |
32cdcc42 | 1802 | && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR |
1803 | && can_propagate_from (def_stmt)) | |
ca3c9092 | 1804 | { |
1805 | code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR; | |
1806 | gimple_assign_set_rhs_code (stmt, code); | |
1807 | rhs2 = gimple_assign_rhs1 (def_stmt); | |
1808 | gimple_assign_set_rhs2 (stmt, rhs2); | |
1809 | gimple_set_modified (stmt, true); | |
1810 | changed = true; | |
1811 | } | |
1812 | } | |
1813 | ||
1814 | /* (-A) + B -> B - A. */ | |
1815 | if (TREE_CODE (rhs1) == SSA_NAME | |
1816 | && code == PLUS_EXPR) | |
1817 | { | |
1818 | gimple def_stmt = SSA_NAME_DEF_STMT (rhs1); | |
1819 | if (is_gimple_assign (def_stmt) | |
32cdcc42 | 1820 | && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR |
1821 | && can_propagate_from (def_stmt)) | |
ca3c9092 | 1822 | { |
1823 | code = MINUS_EXPR; | |
1824 | gimple_assign_set_rhs_code (stmt, code); | |
1825 | rhs1 = rhs2; | |
1826 | gimple_assign_set_rhs1 (stmt, rhs1); | |
1827 | rhs2 = gimple_assign_rhs1 (def_stmt); | |
1828 | gimple_assign_set_rhs2 (stmt, rhs2); | |
1829 | gimple_set_modified (stmt, true); | |
1830 | changed = true; | |
1831 | } | |
1832 | } | |
1833 | } | |
1834 | while (changed); | |
1835 | ||
1836 | /* We can't reassociate floating-point or fixed-point plus or minus | |
1837 | because of saturation to +-Inf. */ | |
1838 | if (FLOAT_TYPE_P (TREE_TYPE (rhs1)) | |
1839 | || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1))) | |
1840 | goto out; | |
1841 | ||
1842 | /* Second match patterns that allow contracting a plus-minus pair | |
1843 | irrespective of overflow issues. | |
1844 | ||
1845 | (A +- B) - A -> +- B | |
1846 | (A +- B) -+ B -> A | |
1847 | (CST +- A) +- CST -> CST +- A | |
1848 | (A + CST) +- CST -> A + CST | |
1849 | ~A + A -> -1 | |
1850 | ~A + 1 -> -A | |
1851 | A - (A +- B) -> -+ B | |
1852 | A +- (B +- A) -> +- B | |
1853 | CST +- (CST +- A) -> CST +- A | |
1854 | CST +- (A +- CST) -> CST +- A | |
1855 | A + ~A -> -1 | |
1856 | ||
1857 | via commutating the addition and contracting operations to zero | |
1858 | by reassociation. */ | |
1859 | ||
1860 | gsi = gsi_for_stmt (stmt); | |
1861 | if (TREE_CODE (rhs1) == SSA_NAME) | |
1862 | { | |
1863 | gimple def_stmt = SSA_NAME_DEF_STMT (rhs1); | |
32cdcc42 | 1864 | if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt)) |
ca3c9092 | 1865 | { |
1866 | enum tree_code def_code = gimple_assign_rhs_code (def_stmt); | |
1867 | if (def_code == PLUS_EXPR | |
1868 | || def_code == MINUS_EXPR) | |
1869 | { | |
1870 | tree def_rhs1 = gimple_assign_rhs1 (def_stmt); | |
1871 | tree def_rhs2 = gimple_assign_rhs2 (def_stmt); | |
1872 | if (operand_equal_p (def_rhs1, rhs2, 0) | |
1873 | && code == MINUS_EXPR) | |
1874 | { | |
1875 | /* (A +- B) - A -> +- B. */ | |
1876 | code = ((def_code == PLUS_EXPR) | |
1877 | ? TREE_CODE (def_rhs2) : NEGATE_EXPR); | |
1878 | rhs1 = def_rhs2; | |
1879 | rhs2 = NULL_TREE; | |
1880 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
1881 | gcc_assert (gsi_stmt (gsi) == stmt); | |
1882 | gimple_set_modified (stmt, true); | |
1883 | } | |
1884 | else if (operand_equal_p (def_rhs2, rhs2, 0) | |
1885 | && code != def_code) | |
1886 | { | |
1887 | /* (A +- B) -+ B -> A. */ | |
1888 | code = TREE_CODE (def_rhs1); | |
1889 | rhs1 = def_rhs1; | |
1890 | rhs2 = NULL_TREE; | |
1891 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
1892 | gcc_assert (gsi_stmt (gsi) == stmt); | |
1893 | gimple_set_modified (stmt, true); | |
1894 | } | |
1895 | else if (TREE_CODE (rhs2) == INTEGER_CST | |
1896 | && TREE_CODE (def_rhs1) == INTEGER_CST) | |
1897 | { | |
1898 | /* (CST +- A) +- CST -> CST +- A. */ | |
1899 | tree cst = fold_binary (code, TREE_TYPE (rhs1), | |
1900 | def_rhs1, rhs2); | |
1901 | if (cst && !TREE_OVERFLOW (cst)) | |
1902 | { | |
1903 | code = def_code; | |
1904 | gimple_assign_set_rhs_code (stmt, code); | |
1905 | rhs1 = cst; | |
1906 | gimple_assign_set_rhs1 (stmt, rhs1); | |
1907 | rhs2 = def_rhs2; | |
1908 | gimple_assign_set_rhs2 (stmt, rhs2); | |
1909 | gimple_set_modified (stmt, true); | |
1910 | } | |
1911 | } | |
1912 | else if (TREE_CODE (rhs2) == INTEGER_CST | |
1913 | && TREE_CODE (def_rhs2) == INTEGER_CST | |
1914 | && def_code == PLUS_EXPR) | |
1915 | { | |
1916 | /* (A + CST) +- CST -> A + CST. */ | |
1917 | tree cst = fold_binary (code, TREE_TYPE (rhs1), | |
1918 | def_rhs2, rhs2); | |
1919 | if (cst && !TREE_OVERFLOW (cst)) | |
1920 | { | |
1921 | code = PLUS_EXPR; | |
1922 | gimple_assign_set_rhs_code (stmt, code); | |
1923 | rhs1 = def_rhs1; | |
1924 | gimple_assign_set_rhs1 (stmt, rhs1); | |
1925 | rhs2 = cst; | |
1926 | gimple_assign_set_rhs2 (stmt, rhs2); | |
1927 | gimple_set_modified (stmt, true); | |
1928 | } | |
1929 | } | |
1930 | } | |
1931 | else if (def_code == BIT_NOT_EXPR | |
1932 | && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))) | |
1933 | { | |
1934 | tree def_rhs1 = gimple_assign_rhs1 (def_stmt); | |
1935 | if (code == PLUS_EXPR | |
1936 | && operand_equal_p (def_rhs1, rhs2, 0)) | |
1937 | { | |
1938 | /* ~A + A -> -1. */ | |
1939 | code = INTEGER_CST; | |
19d861b9 | 1940 | rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1); |
ca3c9092 | 1941 | rhs2 = NULL_TREE; |
1942 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
1943 | gcc_assert (gsi_stmt (gsi) == stmt); | |
1944 | gimple_set_modified (stmt, true); | |
1945 | } | |
1946 | else if (code == PLUS_EXPR | |
1947 | && integer_onep (rhs1)) | |
1948 | { | |
1949 | /* ~A + 1 -> -A. */ | |
1950 | code = NEGATE_EXPR; | |
1951 | rhs1 = def_rhs1; | |
1952 | rhs2 = NULL_TREE; | |
1953 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
1954 | gcc_assert (gsi_stmt (gsi) == stmt); | |
1955 | gimple_set_modified (stmt, true); | |
1956 | } | |
1957 | } | |
1958 | } | |
1959 | } | |
1960 | ||
1961 | if (rhs2 && TREE_CODE (rhs2) == SSA_NAME) | |
1962 | { | |
1963 | gimple def_stmt = SSA_NAME_DEF_STMT (rhs2); | |
32cdcc42 | 1964 | if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt)) |
ca3c9092 | 1965 | { |
1966 | enum tree_code def_code = gimple_assign_rhs_code (def_stmt); | |
1967 | if (def_code == PLUS_EXPR | |
1968 | || def_code == MINUS_EXPR) | |
1969 | { | |
1970 | tree def_rhs1 = gimple_assign_rhs1 (def_stmt); | |
1971 | tree def_rhs2 = gimple_assign_rhs2 (def_stmt); | |
1972 | if (operand_equal_p (def_rhs1, rhs1, 0) | |
1973 | && code == MINUS_EXPR) | |
1974 | { | |
1975 | /* A - (A +- B) -> -+ B. */ | |
1976 | code = ((def_code == PLUS_EXPR) | |
1977 | ? NEGATE_EXPR : TREE_CODE (def_rhs2)); | |
1978 | rhs1 = def_rhs2; | |
1979 | rhs2 = NULL_TREE; | |
1980 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
1981 | gcc_assert (gsi_stmt (gsi) == stmt); | |
1982 | gimple_set_modified (stmt, true); | |
1983 | } | |
1984 | else if (operand_equal_p (def_rhs2, rhs1, 0) | |
1985 | && code != def_code) | |
1986 | { | |
1987 | /* A +- (B +- A) -> +- B. */ | |
1988 | code = ((code == PLUS_EXPR) | |
1989 | ? TREE_CODE (def_rhs1) : NEGATE_EXPR); | |
1990 | rhs1 = def_rhs1; | |
1991 | rhs2 = NULL_TREE; | |
1992 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
1993 | gcc_assert (gsi_stmt (gsi) == stmt); | |
1994 | gimple_set_modified (stmt, true); | |
1995 | } | |
1996 | else if (TREE_CODE (rhs1) == INTEGER_CST | |
1997 | && TREE_CODE (def_rhs1) == INTEGER_CST) | |
1998 | { | |
1999 | /* CST +- (CST +- A) -> CST +- A. */ | |
2000 | tree cst = fold_binary (code, TREE_TYPE (rhs2), | |
2001 | rhs1, def_rhs1); | |
2002 | if (cst && !TREE_OVERFLOW (cst)) | |
2003 | { | |
2004 | code = (code == def_code ? PLUS_EXPR : MINUS_EXPR); | |
2005 | gimple_assign_set_rhs_code (stmt, code); | |
2006 | rhs1 = cst; | |
2007 | gimple_assign_set_rhs1 (stmt, rhs1); | |
2008 | rhs2 = def_rhs2; | |
2009 | gimple_assign_set_rhs2 (stmt, rhs2); | |
2010 | gimple_set_modified (stmt, true); | |
2011 | } | |
2012 | } | |
2013 | else if (TREE_CODE (rhs1) == INTEGER_CST | |
2014 | && TREE_CODE (def_rhs2) == INTEGER_CST) | |
2015 | { | |
2016 | /* CST +- (A +- CST) -> CST +- A. */ | |
2017 | tree cst = fold_binary (def_code == code | |
2018 | ? PLUS_EXPR : MINUS_EXPR, | |
2019 | TREE_TYPE (rhs2), | |
2020 | rhs1, def_rhs2); | |
2021 | if (cst && !TREE_OVERFLOW (cst)) | |
2022 | { | |
2023 | rhs1 = cst; | |
2024 | gimple_assign_set_rhs1 (stmt, rhs1); | |
2025 | rhs2 = def_rhs1; | |
2026 | gimple_assign_set_rhs2 (stmt, rhs2); | |
2027 | gimple_set_modified (stmt, true); | |
2028 | } | |
2029 | } | |
2030 | } | |
2031 | else if (def_code == BIT_NOT_EXPR | |
2032 | && INTEGRAL_TYPE_P (TREE_TYPE (rhs2))) | |
2033 | { | |
2034 | tree def_rhs1 = gimple_assign_rhs1 (def_stmt); | |
2035 | if (code == PLUS_EXPR | |
2036 | && operand_equal_p (def_rhs1, rhs1, 0)) | |
2037 | { | |
2038 | /* A + ~A -> -1. */ | |
2039 | code = INTEGER_CST; | |
19d861b9 | 2040 | rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1); |
ca3c9092 | 2041 | rhs2 = NULL_TREE; |
2042 | gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE); | |
2043 | gcc_assert (gsi_stmt (gsi) == stmt); | |
2044 | gimple_set_modified (stmt, true); | |
2045 | } | |
2046 | } | |
2047 | } | |
2048 | } | |
2049 | ||
2050 | out: | |
2051 | if (gimple_modified_p (stmt)) | |
2052 | { | |
2053 | fold_stmt_inplace (stmt); | |
2054 | update_stmt (stmt); | |
b69d1cb6 | 2055 | if (maybe_clean_or_replace_eh_stmt (stmt, stmt) |
2056 | && gimple_purge_dead_eh_edges (gimple_bb (stmt))) | |
2057 | return true; | |
ca3c9092 | 2058 | } |
b69d1cb6 | 2059 | |
2060 | return false; | |
ca3c9092 | 2061 | } |
2062 | ||
6afd0544 | 2063 | /* Combine two conversions in a row for the second conversion at *GSI. |
2064 | Returns true if there were any changes made. */ | |
2065 | ||
2066 | static bool | |
2067 | combine_conversions (gimple_stmt_iterator *gsi) | |
2068 | { | |
2069 | gimple stmt = gsi_stmt (*gsi); | |
2070 | gimple def_stmt; | |
2071 | tree op0, lhs; | |
2072 | enum tree_code code = gimple_assign_rhs_code (stmt); | |
2073 | ||
2074 | gcc_checking_assert (CONVERT_EXPR_CODE_P (code) | |
2075 | || code == FLOAT_EXPR | |
2076 | || code == FIX_TRUNC_EXPR); | |
2077 | ||
2078 | lhs = gimple_assign_lhs (stmt); | |
2079 | op0 = gimple_assign_rhs1 (stmt); | |
2080 | if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0))) | |
2081 | { | |
2082 | gimple_assign_set_rhs_code (stmt, TREE_CODE (op0)); | |
2083 | return true; | |
2084 | } | |
2085 | ||
2086 | if (TREE_CODE (op0) != SSA_NAME) | |
2087 | return false; | |
2088 | ||
2089 | def_stmt = SSA_NAME_DEF_STMT (op0); | |
2090 | if (!is_gimple_assign (def_stmt)) | |
2091 | return false; | |
2092 | ||
2093 | if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))) | |
2094 | { | |
2095 | tree defop0 = gimple_assign_rhs1 (def_stmt); | |
2096 | tree type = TREE_TYPE (lhs); | |
2097 | tree inside_type = TREE_TYPE (defop0); | |
2098 | tree inter_type = TREE_TYPE (op0); | |
2099 | int inside_int = INTEGRAL_TYPE_P (inside_type); | |
2100 | int inside_ptr = POINTER_TYPE_P (inside_type); | |
2101 | int inside_float = FLOAT_TYPE_P (inside_type); | |
2102 | int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE; | |
2103 | unsigned int inside_prec = TYPE_PRECISION (inside_type); | |
2104 | int inside_unsignedp = TYPE_UNSIGNED (inside_type); | |
2105 | int inter_int = INTEGRAL_TYPE_P (inter_type); | |
2106 | int inter_ptr = POINTER_TYPE_P (inter_type); | |
2107 | int inter_float = FLOAT_TYPE_P (inter_type); | |
2108 | int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE; | |
2109 | unsigned int inter_prec = TYPE_PRECISION (inter_type); | |
2110 | int inter_unsignedp = TYPE_UNSIGNED (inter_type); | |
2111 | int final_int = INTEGRAL_TYPE_P (type); | |
2112 | int final_ptr = POINTER_TYPE_P (type); | |
2113 | int final_float = FLOAT_TYPE_P (type); | |
2114 | int final_vec = TREE_CODE (type) == VECTOR_TYPE; | |
2115 | unsigned int final_prec = TYPE_PRECISION (type); | |
2116 | int final_unsignedp = TYPE_UNSIGNED (type); | |
2117 | ||
2118 | /* In addition to the cases of two conversions in a row | |
2119 | handled below, if we are converting something to its own | |
2120 | type via an object of identical or wider precision, neither | |
2121 | conversion is needed. */ | |
2122 | if (useless_type_conversion_p (type, inside_type) | |
2123 | && (((inter_int || inter_ptr) && final_int) | |
2124 | || (inter_float && final_float)) | |
2125 | && inter_prec >= final_prec) | |
2126 | { | |
2127 | gimple_assign_set_rhs1 (stmt, unshare_expr (defop0)); | |
2128 | gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0)); | |
2129 | update_stmt (stmt); | |
2130 | return true; | |
2131 | } | |
2132 | ||
2133 | /* Likewise, if the intermediate and initial types are either both | |
2134 | float or both integer, we don't need the middle conversion if the | |
2135 | former is wider than the latter and doesn't change the signedness | |
2136 | (for integers). Avoid this if the final type is a pointer since | |
2137 | then we sometimes need the middle conversion. Likewise if the | |
2138 | final type has a precision not equal to the size of its mode. */ | |
2139 | if (((inter_int && inside_int) | |
2140 | || (inter_float && inside_float) | |
2141 | || (inter_vec && inside_vec)) | |
2142 | && inter_prec >= inside_prec | |
2143 | && (inter_float || inter_vec | |
2144 | || inter_unsignedp == inside_unsignedp) | |
2145 | && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) | |
2146 | && TYPE_MODE (type) == TYPE_MODE (inter_type)) | |
2147 | && ! final_ptr | |
2148 | && (! final_vec || inter_prec == inside_prec)) | |
2149 | { | |
2150 | gimple_assign_set_rhs1 (stmt, defop0); | |
2151 | update_stmt (stmt); | |
2152 | return true; | |
2153 | } | |
2154 | ||
2155 | /* If we have a sign-extension of a zero-extended value, we can | |
2156 | replace that by a single zero-extension. */ | |
2157 | if (inside_int && inter_int && final_int | |
2158 | && inside_prec < inter_prec && inter_prec < final_prec | |
2159 | && inside_unsignedp && !inter_unsignedp) | |
2160 | { | |
2161 | gimple_assign_set_rhs1 (stmt, defop0); | |
2162 | update_stmt (stmt); | |
2163 | return true; | |
2164 | } | |
2165 | ||
2166 | /* Two conversions in a row are not needed unless: | |
2167 | - some conversion is floating-point (overstrict for now), or | |
2168 | - some conversion is a vector (overstrict for now), or | |
2169 | - the intermediate type is narrower than both initial and | |
2170 | final, or | |
2171 | - the intermediate type and innermost type differ in signedness, | |
2172 | and the outermost type is wider than the intermediate, or | |
2173 | - the initial type is a pointer type and the precisions of the | |
2174 | intermediate and final types differ, or | |
2175 | - the final type is a pointer type and the precisions of the | |
2176 | initial and intermediate types differ. */ | |
2177 | if (! inside_float && ! inter_float && ! final_float | |
2178 | && ! inside_vec && ! inter_vec && ! final_vec | |
2179 | && (inter_prec >= inside_prec || inter_prec >= final_prec) | |
2180 | && ! (inside_int && inter_int | |
2181 | && inter_unsignedp != inside_unsignedp | |
2182 | && inter_prec < final_prec) | |
2183 | && ((inter_unsignedp && inter_prec > inside_prec) | |
2184 | == (final_unsignedp && final_prec > inter_prec)) | |
2185 | && ! (inside_ptr && inter_prec != final_prec) | |
2186 | && ! (final_ptr && inside_prec != inter_prec) | |
2187 | && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) | |
2188 | && TYPE_MODE (type) == TYPE_MODE (inter_type))) | |
2189 | { | |
2190 | gimple_assign_set_rhs1 (stmt, defop0); | |
2191 | update_stmt (stmt); | |
2192 | return true; | |
2193 | } | |
2194 | ||
2195 | /* A truncation to an unsigned type should be canonicalized as | |
2196 | bitwise and of a mask. */ | |
2197 | if (final_int && inter_int && inside_int | |
2198 | && final_prec == inside_prec | |
2199 | && final_prec > inter_prec | |
2200 | && inter_unsignedp) | |
2201 | { | |
2202 | tree tem; | |
2203 | tem = fold_build2 (BIT_AND_EXPR, inside_type, | |
2204 | defop0, | |
2205 | double_int_to_tree | |
2206 | (inside_type, double_int_mask (inter_prec))); | |
2207 | if (!useless_type_conversion_p (type, inside_type)) | |
2208 | { | |
2209 | tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true, | |
2210 | GSI_SAME_STMT); | |
2211 | gimple_assign_set_rhs1 (stmt, tem); | |
2212 | } | |
2213 | else | |
2214 | gimple_assign_set_rhs_from_tree (gsi, tem); | |
2215 | update_stmt (gsi_stmt (*gsi)); | |
2216 | return true; | |
2217 | } | |
2218 | } | |
2219 | ||
2220 | return false; | |
2221 | } | |
2222 | ||
678b2f5b | 2223 | /* Main entry point for the forward propagation and statement combine |
2224 | optimizer. */ | |
4ee9c684 | 2225 | |
2a1990e9 | 2226 | static unsigned int |
678b2f5b | 2227 | ssa_forward_propagate_and_combine (void) |
4ee9c684 | 2228 | { |
f5c8cff5 | 2229 | basic_block bb; |
c96420f8 | 2230 | unsigned int todoflags = 0; |
4ee9c684 | 2231 | |
148aa112 | 2232 | cfg_changed = false; |
2233 | ||
f5c8cff5 | 2234 | FOR_EACH_BB (bb) |
2235 | { | |
a7107e58 | 2236 | gimple_stmt_iterator gsi, prev; |
2237 | bool prev_initialized; | |
291d763b | 2238 | |
678b2f5b | 2239 | /* Apply forward propagation to all stmts in the basic-block. |
2240 | Note we update GSI within the loop as necessary. */ | |
75a70cf9 | 2241 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); ) |
291d763b | 2242 | { |
75a70cf9 | 2243 | gimple stmt = gsi_stmt (gsi); |
678b2f5b | 2244 | tree lhs, rhs; |
2245 | enum tree_code code; | |
291d763b | 2246 | |
678b2f5b | 2247 | if (!is_gimple_assign (stmt)) |
291d763b | 2248 | { |
678b2f5b | 2249 | gsi_next (&gsi); |
2250 | continue; | |
2251 | } | |
3a938499 | 2252 | |
678b2f5b | 2253 | lhs = gimple_assign_lhs (stmt); |
2254 | rhs = gimple_assign_rhs1 (stmt); | |
2255 | code = gimple_assign_rhs_code (stmt); | |
2256 | if (TREE_CODE (lhs) != SSA_NAME | |
2257 | || has_zero_uses (lhs)) | |
2258 | { | |
2259 | gsi_next (&gsi); | |
2260 | continue; | |
2261 | } | |
3a938499 | 2262 | |
678b2f5b | 2263 | /* If this statement sets an SSA_NAME to an address, |
2264 | try to propagate the address into the uses of the SSA_NAME. */ | |
2265 | if (code == ADDR_EXPR | |
2266 | /* Handle pointer conversions on invariant addresses | |
2267 | as well, as this is valid gimple. */ | |
2268 | || (CONVERT_EXPR_CODE_P (code) | |
2269 | && TREE_CODE (rhs) == ADDR_EXPR | |
2270 | && POINTER_TYPE_P (TREE_TYPE (lhs)))) | |
2271 | { | |
2272 | tree base = get_base_address (TREE_OPERAND (rhs, 0)); | |
2273 | if ((!base | |
2274 | || !DECL_P (base) | |
2275 | || decl_address_invariant_p (base)) | |
2276 | && !stmt_references_abnormal_ssa_name (stmt) | |
2277 | && forward_propagate_addr_expr (lhs, rhs)) | |
1c4607fd | 2278 | { |
678b2f5b | 2279 | release_defs (stmt); |
2280 | todoflags |= TODO_remove_unused_locals; | |
2281 | gsi_remove (&gsi, true); | |
1c4607fd | 2282 | } |
678b2f5b | 2283 | else |
2284 | gsi_next (&gsi); | |
2285 | } | |
32cdcc42 | 2286 | else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt)) |
678b2f5b | 2287 | { |
2288 | if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST | |
2289 | /* ??? Better adjust the interface to that function | |
2290 | instead of building new trees here. */ | |
2291 | && forward_propagate_addr_expr | |
2292 | (lhs, | |
2293 | build1 (ADDR_EXPR, | |
2294 | TREE_TYPE (rhs), | |
2295 | fold_build2 (MEM_REF, | |
2296 | TREE_TYPE (TREE_TYPE (rhs)), | |
2297 | rhs, | |
2298 | fold_convert | |
2299 | (ptr_type_node, | |
2300 | gimple_assign_rhs2 (stmt)))))) | |
ca3c9092 | 2301 | { |
678b2f5b | 2302 | release_defs (stmt); |
2303 | todoflags |= TODO_remove_unused_locals; | |
2304 | gsi_remove (&gsi, true); | |
ca3c9092 | 2305 | } |
678b2f5b | 2306 | else if (is_gimple_min_invariant (rhs)) |
6afd0544 | 2307 | { |
678b2f5b | 2308 | /* Make sure to fold &a[0] + off_1 here. */ |
2309 | fold_stmt_inplace (stmt); | |
2310 | update_stmt (stmt); | |
2311 | if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR) | |
6afd0544 | 2312 | gsi_next (&gsi); |
2313 | } | |
291d763b | 2314 | else |
75a70cf9 | 2315 | gsi_next (&gsi); |
291d763b | 2316 | } |
678b2f5b | 2317 | else if (TREE_CODE_CLASS (code) == tcc_comparison) |
b5860aba | 2318 | { |
678b2f5b | 2319 | forward_propagate_comparison (stmt); |
75a70cf9 | 2320 | gsi_next (&gsi); |
b5860aba | 2321 | } |
291d763b | 2322 | else |
75a70cf9 | 2323 | gsi_next (&gsi); |
291d763b | 2324 | } |
678b2f5b | 2325 | |
2326 | /* Combine stmts with the stmts defining their operands. | |
2327 | Note we update GSI within the loop as necessary. */ | |
a7107e58 | 2328 | prev_initialized = false; |
2329 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);) | |
678b2f5b | 2330 | { |
2331 | gimple stmt = gsi_stmt (gsi); | |
2332 | bool changed = false; | |
2333 | ||
2334 | switch (gimple_code (stmt)) | |
2335 | { | |
2336 | case GIMPLE_ASSIGN: | |
2337 | { | |
2338 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
2339 | enum tree_code code = gimple_assign_rhs_code (stmt); | |
2340 | ||
2341 | if ((code == BIT_NOT_EXPR | |
2342 | || code == NEGATE_EXPR) | |
2343 | && TREE_CODE (rhs1) == SSA_NAME) | |
2344 | changed = simplify_not_neg_expr (&gsi); | |
2345 | else if (code == COND_EXPR) | |
2346 | { | |
2347 | /* In this case the entire COND_EXPR is in rhs1. */ | |
2348 | int did_something; | |
2349 | fold_defer_overflow_warnings (); | |
2350 | did_something = forward_propagate_into_cond (&gsi); | |
2351 | stmt = gsi_stmt (gsi); | |
2352 | if (did_something == 2) | |
2353 | cfg_changed = true; | |
2354 | fold_undefer_overflow_warnings | |
2355 | (!TREE_NO_WARNING (rhs1) && did_something, stmt, | |
2356 | WARN_STRICT_OVERFLOW_CONDITIONAL); | |
2357 | changed = did_something != 0; | |
2358 | } | |
2359 | else if (TREE_CODE_CLASS (code) == tcc_comparison) | |
2360 | { | |
2361 | bool no_warning = gimple_no_warning_p (stmt); | |
2362 | fold_defer_overflow_warnings (); | |
2363 | changed = forward_propagate_into_comparison (&gsi); | |
2364 | fold_undefer_overflow_warnings | |
2365 | (!no_warning && changed, | |
2366 | stmt, WARN_STRICT_OVERFLOW_CONDITIONAL); | |
2367 | } | |
2368 | else if (code == BIT_AND_EXPR | |
2369 | || code == BIT_IOR_EXPR | |
2370 | || code == BIT_XOR_EXPR) | |
2371 | changed = simplify_bitwise_binary (&gsi); | |
2372 | else if (code == PLUS_EXPR | |
2373 | || code == MINUS_EXPR) | |
2374 | changed = associate_plusminus (stmt); | |
2375 | else if (CONVERT_EXPR_CODE_P (code) | |
2376 | || code == FLOAT_EXPR | |
2377 | || code == FIX_TRUNC_EXPR) | |
2378 | changed = combine_conversions (&gsi); | |
2379 | break; | |
2380 | } | |
2381 | ||
2382 | case GIMPLE_SWITCH: | |
2383 | changed = simplify_gimple_switch (stmt); | |
2384 | break; | |
2385 | ||
2386 | case GIMPLE_COND: | |
2387 | { | |
2388 | int did_something; | |
2389 | fold_defer_overflow_warnings (); | |
2390 | did_something = forward_propagate_into_gimple_cond (stmt); | |
2391 | if (did_something == 2) | |
2392 | cfg_changed = true; | |
2393 | fold_undefer_overflow_warnings | |
2394 | (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL); | |
2395 | changed = did_something != 0; | |
2396 | break; | |
2397 | } | |
2398 | ||
2399 | case GIMPLE_CALL: | |
2400 | { | |
2401 | tree callee = gimple_call_fndecl (stmt); | |
2402 | if (callee != NULL_TREE | |
2403 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL) | |
2404 | changed = simplify_builtin_call (&gsi, callee); | |
2405 | break; | |
2406 | } | |
2407 | ||
2408 | default:; | |
2409 | } | |
2410 | ||
a7107e58 | 2411 | if (changed) |
2412 | { | |
2413 | /* If the stmt changed then re-visit it and the statements | |
2414 | inserted before it. */ | |
2415 | if (!prev_initialized) | |
2416 | gsi = gsi_start_bb (bb); | |
2417 | else | |
2418 | { | |
2419 | gsi = prev; | |
2420 | gsi_next (&gsi); | |
2421 | } | |
2422 | } | |
2423 | else | |
2424 | { | |
2425 | prev = gsi; | |
2426 | prev_initialized = true; | |
2427 | gsi_next (&gsi); | |
2428 | } | |
678b2f5b | 2429 | } |
f5c8cff5 | 2430 | } |
148aa112 | 2431 | |
2432 | if (cfg_changed) | |
6fa78c7b | 2433 | todoflags |= TODO_cleanup_cfg; |
678b2f5b | 2434 | |
c96420f8 | 2435 | return todoflags; |
4ee9c684 | 2436 | } |
2437 | ||
2438 | ||
2439 | static bool | |
2440 | gate_forwprop (void) | |
2441 | { | |
408c3c77 | 2442 | return flag_tree_forwprop; |
4ee9c684 | 2443 | } |
2444 | ||
/* Pass descriptor registering the forward-propagation pass with the
   pass manager.  The initializer is positional: the inner braces fill
   the embedded struct opt_pass.  The pass requires a CFG and SSA form,
   and after it finishes the manager garbage-collects, updates SSA form
   and verifies it.  */
struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  ssa_forward_propagate_and_combine,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};