]>
Commit | Line | Data |
---|---|---|
291d763b | 1 | /* Forward propagation of expressions for single use variables. |
628ce22b | 2 | Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011 |
ce084dfc | 3 | Free Software Foundation, Inc. |
4ee9c684 | 4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 9 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
4ee9c684 | 25 | #include "tree.h" |
4ee9c684 | 26 | #include "tm_p.h" |
27 | #include "basic-block.h" | |
28 | #include "timevar.h" | |
ce084dfc | 29 | #include "tree-pretty-print.h" |
4ee9c684 | 30 | #include "tree-flow.h" |
31 | #include "tree-pass.h" | |
32 | #include "tree-dump.h" | |
291d763b | 33 | #include "langhooks.h" |
5adc1066 | 34 | #include "flags.h" |
75a70cf9 | 35 | #include "gimple.h" |
27f931ff | 36 | #include "expr.h" |
4ee9c684 | 37 | |
291d763b | 38 | /* This pass propagates the RHS of assignment statements into use |
39 | sites of the LHS of the assignment. It's basically a specialized | |
8f628ee8 | 40 | form of tree combination. It is hoped all of this can disappear |
41 | when we have a generalized tree combiner. | |
4ee9c684 | 42 | |
291d763b | 43 | One class of common cases we handle is forward propagating a single use |
48e1416a | 44 | variable into a COND_EXPR. |
4ee9c684 | 45 | |
46 | bb0: | |
47 | x = a COND b; | |
48 | if (x) goto ... else goto ... | |
49 | ||
50 | Will be transformed into: | |
51 | ||
52 | bb0: | |
53 | if (a COND b) goto ... else goto ... | |
48e1416a | 54 | |
4ee9c684 | 55 | Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). |
56 | ||
57 | Or (assuming c1 and c2 are constants): | |
58 | ||
59 | bb0: | |
48e1416a | 60 | x = a + c1; |
4ee9c684 | 61 | if (x EQ/NEQ c2) goto ... else goto ... |
62 | ||
63 | Will be transformed into: | |
64 | ||
65 | bb0: | |
66 | if (a EQ/NEQ (c2 - c1)) goto ... else goto ... | |
67 | ||
68 | Similarly for x = a - c1. | |
48e1416a | 69 | |
4ee9c684 | 70 | Or |
71 | ||
72 | bb0: | |
73 | x = !a | |
74 | if (x) goto ... else goto ... | |
75 | ||
76 | Will be transformed into: | |
77 | ||
78 | bb0: | |
79 | if (a == 0) goto ... else goto ... | |
80 | ||
81 | Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
82 | For these cases, we propagate A into all, possibly more than one, | |
83 | COND_EXPRs that use X. | |
84 | ||
f5c8cff5 | 85 | Or |
86 | ||
87 | bb0: | |
88 | x = (typecast) a | |
89 | if (x) goto ... else goto ... | |
90 | ||
91 | Will be transformed into: | |
92 | ||
93 | bb0: | |
94 | if (a != 0) goto ... else goto ... | |
95 | ||
96 | (Assuming a is an integral type and x is a boolean or x is an | |
97 | integral and a is a boolean.) | |
98 | ||
99 | Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
100 | For these cases, we propagate A into all, possibly more than one, | |
101 | COND_EXPRs that use X. | |
102 | ||
4ee9c684 | 103 | In addition to eliminating the variable and the statement which assigns |
104 | a value to the variable, we may be able to later thread the jump without | |
e6dfde59 | 105 | adding insane complexity in the dominator optimizer. |
4ee9c684 | 106 | |
f5c8cff5 | 107 | Also note these transformations can cascade. We handle this by having |
108 | a worklist of COND_EXPR statements to examine. As we make a change to | |
109 | a statement, we put it back on the worklist to examine on the next | |
110 | iteration of the main loop. | |
111 | ||
291d763b | 112 | A second class of propagation opportunities arises for ADDR_EXPR |
113 | nodes. | |
114 | ||
115 | ptr = &x->y->z; | |
116 | res = *ptr; | |
117 | ||
118 | Will get turned into | |
119 | ||
120 | res = x->y->z; | |
121 | ||
50f39ec6 | 122 | Or |
123 | ptr = (type1*)&type2var; | |
124 | res = *ptr | |
125 | ||
126 | Will get turned into (if type1 and type2 are the same size | |
127 | and neither have volatile on them): | |
128 | res = VIEW_CONVERT_EXPR<type1>(type2var) | |
129 | ||
291d763b | 130 | Or |
131 | ||
132 | ptr = &x[0]; | |
133 | ptr2 = ptr + <constant>; | |
134 | ||
135 | Will get turned into | |
136 | ||
137 | ptr2 = &x[constant/elementsize]; | |
138 | ||
139 | Or | |
140 | ||
141 | ptr = &x[0]; | |
142 | offset = index * element_size; | |
143 | offset_p = (pointer) offset; | |
144 | ptr2 = ptr + offset_p | |
145 | ||
146 | Will get turned into: | |
147 | ||
148 | ptr2 = &x[index]; | |
149 | ||
1c4607fd | 150 | Or |
151 | ssa = (int) decl | |
152 | res = ssa & 1 | |
153 | ||
154 | Provided that decl has known alignment >= 2, will get turned into | |
155 | ||
156 | res = 0 | |
157 | ||
8f628ee8 | 158 | We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to |
159 | allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent | |
160 | {NOT_EXPR,NEG_EXPR}. | |
291d763b | 161 | |
4ee9c684 | 162 | This will (of course) be extended as other needs arise. */ |
163 | ||
15ec875c | 164 | static bool forward_propagate_addr_expr (tree name, tree rhs); |
148aa112 | 165 | |
166 | /* Set to true if we delete EH edges during the optimization. */ | |
167 | static bool cfg_changed; | |
168 | ||
75a70cf9 | 169 | static tree rhs_to_tree (tree type, gimple stmt); |
148aa112 | 170 | |
83a20baf | 171 | /* Get the next statement we can propagate NAME's value into skipping |
5adc1066 | 172 | trivial copies. Returns the statement that is suitable as a |
173 | propagation destination or NULL_TREE if there is no such one. | |
174 | This only returns destinations in a single-use chain. FINAL_NAME_P | |
175 | if non-NULL is written to the ssa name that represents the use. */ | |
a3451973 | 176 | |
75a70cf9 | 177 | static gimple |
5adc1066 | 178 | get_prop_dest_stmt (tree name, tree *final_name_p) |
a3451973 | 179 | { |
5adc1066 | 180 | use_operand_p use; |
75a70cf9 | 181 | gimple use_stmt; |
a3451973 | 182 | |
5adc1066 | 183 | do { |
184 | /* If name has multiple uses, bail out. */ | |
185 | if (!single_imm_use (name, &use, &use_stmt)) | |
75a70cf9 | 186 | return NULL; |
a3451973 | 187 | |
5adc1066 | 188 | /* If this is not a trivial copy, we found it. */ |
8f0b877f | 189 | if (!gimple_assign_ssa_name_copy_p (use_stmt) |
75a70cf9 | 190 | || gimple_assign_rhs1 (use_stmt) != name) |
5adc1066 | 191 | break; |
192 | ||
193 | /* Continue searching uses of the copy destination. */ | |
75a70cf9 | 194 | name = gimple_assign_lhs (use_stmt); |
5adc1066 | 195 | } while (1); |
196 | ||
197 | if (final_name_p) | |
198 | *final_name_p = name; | |
199 | ||
200 | return use_stmt; | |
a3451973 | 201 | } |
202 | ||
5adc1066 | 203 | /* Get the statement we can propagate from into NAME skipping |
204 | trivial copies. Returns the statement which defines the | |
205 | propagation source or NULL_TREE if there is no such one. | |
206 | If SINGLE_USE_ONLY is set considers only sources which have | |
207 | a single use chain up to NAME. If SINGLE_USE_P is non-null, | |
208 | it is set to whether the chain to NAME is a single use chain | |
209 | or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */ | |
4ee9c684 | 210 | |
75a70cf9 | 211 | static gimple |
5adc1066 | 212 | get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p) |
f5c8cff5 | 213 | { |
5adc1066 | 214 | bool single_use = true; |
215 | ||
216 | do { | |
75a70cf9 | 217 | gimple def_stmt = SSA_NAME_DEF_STMT (name); |
5adc1066 | 218 | |
219 | if (!has_single_use (name)) | |
220 | { | |
221 | single_use = false; | |
222 | if (single_use_only) | |
75a70cf9 | 223 | return NULL; |
5adc1066 | 224 | } |
225 | ||
226 | /* If name is defined by a PHI node or is the default def, bail out. */ | |
8f0b877f | 227 | if (!is_gimple_assign (def_stmt)) |
75a70cf9 | 228 | return NULL; |
5adc1066 | 229 | |
8f0b877f | 230 | /* If def_stmt is not a simple copy, we possibly found it. */ |
231 | if (!gimple_assign_ssa_name_copy_p (def_stmt)) | |
5adc1066 | 232 | { |
b9e98b8a | 233 | tree rhs; |
234 | ||
5adc1066 | 235 | if (!single_use_only && single_use_p) |
236 | *single_use_p = single_use; | |
237 | ||
b9e98b8a | 238 | /* We can look through pointer conversions in the search |
239 | for a useful stmt for the comparison folding. */ | |
75a70cf9 | 240 | rhs = gimple_assign_rhs1 (def_stmt); |
d9659041 | 241 | if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)) |
75a70cf9 | 242 | && TREE_CODE (rhs) == SSA_NAME |
243 | && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt))) | |
244 | && POINTER_TYPE_P (TREE_TYPE (rhs))) | |
245 | name = rhs; | |
b9e98b8a | 246 | else |
247 | return def_stmt; | |
248 | } | |
249 | else | |
250 | { | |
251 | /* Continue searching the def of the copy source name. */ | |
75a70cf9 | 252 | name = gimple_assign_rhs1 (def_stmt); |
5adc1066 | 253 | } |
5adc1066 | 254 | } while (1); |
255 | } | |
e6dfde59 | 256 | |
5adc1066 | 257 | /* Checks if the destination ssa name in DEF_STMT can be used as |
258 | propagation source. Returns true if so, otherwise false. */ | |
e6dfde59 | 259 | |
5adc1066 | 260 | static bool |
75a70cf9 | 261 | can_propagate_from (gimple def_stmt) |
5adc1066 | 262 | { |
cc5ef3f4 | 263 | use_operand_p use_p; |
264 | ssa_op_iter iter; | |
e6dfde59 | 265 | |
75a70cf9 | 266 | gcc_assert (is_gimple_assign (def_stmt)); |
8f0b877f | 267 | |
484b827b | 268 | /* If the rhs has side-effects we cannot propagate from it. */ |
75a70cf9 | 269 | if (gimple_has_volatile_ops (def_stmt)) |
484b827b | 270 | return false; |
271 | ||
272 | /* If the rhs is a load we cannot propagate from it. */ | |
75a70cf9 | 273 | if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference |
274 | || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration) | |
484b827b | 275 | return false; |
276 | ||
b9e98b8a | 277 | /* Constants can be always propagated. */ |
8f0b877f | 278 | if (gimple_assign_single_p (def_stmt) |
279 | && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))) | |
b9e98b8a | 280 | return true; |
281 | ||
75a70cf9 | 282 | /* We cannot propagate ssa names that occur in abnormal phi nodes. */ |
cc5ef3f4 | 283 | FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE) |
284 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p))) | |
5adc1066 | 285 | return false; |
4ee9c684 | 286 | |
5adc1066 | 287 | /* If the definition is a conversion of a pointer to a function type, |
75a70cf9 | 288 | then we can not apply optimizations as some targets require |
289 | function pointers to be canonicalized and in this case this | |
290 | optimization could eliminate a necessary canonicalization. */ | |
8f0b877f | 291 | if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))) |
75a70cf9 | 292 | { |
293 | tree rhs = gimple_assign_rhs1 (def_stmt); | |
294 | if (POINTER_TYPE_P (TREE_TYPE (rhs)) | |
295 | && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE) | |
296 | return false; | |
297 | } | |
8f0b877f | 298 | |
5adc1066 | 299 | return true; |
e6dfde59 | 300 | } |
301 | ||
5d2361b0 | 302 | /* Remove a copy chain ending in NAME along the defs. |
303 | If NAME was replaced in its only use then this function can be used | |
304 | to clean up dead stmts. Returns true if cleanup-cfg has to run. */ | |
8f628ee8 | 305 | |
5adc1066 | 306 | static bool |
5d2361b0 | 307 | remove_prop_source_from_use (tree name) |
5adc1066 | 308 | { |
75a70cf9 | 309 | gimple_stmt_iterator gsi; |
310 | gimple stmt; | |
5d2361b0 | 311 | bool cfg_changed = false; |
8f628ee8 | 312 | |
5adc1066 | 313 | do { |
5d2361b0 | 314 | basic_block bb; |
315 | ||
5adc1066 | 316 | if (!has_zero_uses (name)) |
5d2361b0 | 317 | return cfg_changed; |
8f628ee8 | 318 | |
5adc1066 | 319 | stmt = SSA_NAME_DEF_STMT (name); |
75a70cf9 | 320 | gsi = gsi_for_stmt (stmt); |
5d2361b0 | 321 | bb = gimple_bb (stmt); |
5adc1066 | 322 | release_defs (stmt); |
75a70cf9 | 323 | gsi_remove (&gsi, true); |
5d2361b0 | 324 | cfg_changed |= gimple_purge_dead_eh_edges (bb); |
8f628ee8 | 325 | |
75a70cf9 | 326 | name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL; |
327 | } while (name && TREE_CODE (name) == SSA_NAME); | |
8f628ee8 | 328 | |
5d2361b0 | 329 | return cfg_changed; |
5adc1066 | 330 | } |
8f628ee8 | 331 | |
75a70cf9 | 332 | /* Return the rhs of a gimple_assign STMT in a form of a single tree, |
333 | converted to type TYPE. | |
48e1416a | 334 | |
75a70cf9 | 335 | This should disappear, but is needed so we can combine expressions and use |
336 | the fold() interfaces. Long term, we need to develop folding and combine | |
337 | routines that deal with gimple exclusively . */ | |
338 | ||
339 | static tree | |
340 | rhs_to_tree (tree type, gimple stmt) | |
341 | { | |
389dd41b | 342 | location_t loc = gimple_location (stmt); |
75a70cf9 | 343 | enum tree_code code = gimple_assign_rhs_code (stmt); |
57c45d70 | 344 | if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS) |
345 | return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt), | |
346 | gimple_assign_rhs2 (stmt), | |
347 | gimple_assign_rhs3 (stmt)); | |
348 | else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
389dd41b | 349 | return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt), |
fb8ed03f | 350 | gimple_assign_rhs2 (stmt)); |
75a70cf9 | 351 | else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) |
fb8ed03f | 352 | return build1 (code, type, gimple_assign_rhs1 (stmt)); |
75a70cf9 | 353 | else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) |
354 | return gimple_assign_rhs1 (stmt); | |
355 | else | |
356 | gcc_unreachable (); | |
357 | } | |
358 | ||
5adc1066 | 359 | /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns |
360 | the folded result in a form suitable for COND_EXPR_COND or | |
361 | NULL_TREE, if there is no suitable simplified form. If | |
362 | INVARIANT_ONLY is true only gimple_min_invariant results are | |
363 | considered simplified. */ | |
8f628ee8 | 364 | |
365 | static tree | |
389dd41b | 366 | combine_cond_expr_cond (location_t loc, enum tree_code code, tree type, |
5adc1066 | 367 | tree op0, tree op1, bool invariant_only) |
8f628ee8 | 368 | { |
5adc1066 | 369 | tree t; |
8f628ee8 | 370 | |
5adc1066 | 371 | gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison); |
8f628ee8 | 372 | |
389dd41b | 373 | t = fold_binary_loc (loc, code, type, op0, op1); |
5adc1066 | 374 | if (!t) |
375 | return NULL_TREE; | |
8f628ee8 | 376 | |
5adc1066 | 377 | /* Require that we got a boolean type out if we put one in. */ |
378 | gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type)); | |
8f628ee8 | 379 | |
a7392604 | 380 | /* Canonicalize the combined condition for use in a COND_EXPR. */ |
381 | t = canonicalize_cond_expr_cond (t); | |
8f628ee8 | 382 | |
5adc1066 | 383 | /* Bail out if we required an invariant but didn't get one. */ |
75a70cf9 | 384 | if (!t || (invariant_only && !is_gimple_min_invariant (t))) |
5adc1066 | 385 | return NULL_TREE; |
8f628ee8 | 386 | |
a7392604 | 387 | return t; |
8f628ee8 | 388 | } |
389 | ||
5adc1066 | 390 | /* Propagate from the ssa name definition statements of COND_EXPR |
75a70cf9 | 391 | in GIMPLE_COND statement STMT into the conditional if that simplifies it. |
392 | Returns zero if no statement was changed, one if there were | |
393 | changes and two if cfg_cleanup needs to run. | |
48e1416a | 394 | |
75a70cf9 | 395 | This must be kept in sync with forward_propagate_into_cond. */ |
396 | ||
397 | static int | |
398 | forward_propagate_into_gimple_cond (gimple stmt) | |
399 | { | |
389dd41b | 400 | int did_something = 0; |
48e1416a | 401 | location_t loc = gimple_location (stmt); |
75a70cf9 | 402 | |
403 | do { | |
404 | tree tmp = NULL_TREE; | |
7f2d9047 | 405 | tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE; |
75a70cf9 | 406 | gimple def_stmt; |
407 | bool single_use0_p = false, single_use1_p = false; | |
408 | enum tree_code code = gimple_cond_code (stmt); | |
409 | ||
410 | /* We can do tree combining on SSA_NAME and comparison expressions. */ | |
7f2d9047 | 411 | if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison) |
75a70cf9 | 412 | { |
413 | /* For comparisons use the first operand, that is likely to | |
414 | simplify comparisons against constants. */ | |
7f2d9047 | 415 | if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME) |
75a70cf9 | 416 | { |
7f2d9047 | 417 | name = gimple_cond_lhs (stmt); |
418 | def_stmt = get_prop_source_stmt (name, false, &single_use0_p); | |
419 | if (def_stmt && can_propagate_from (def_stmt)) | |
420 | { | |
421 | tree op1 = gimple_cond_rhs (stmt); | |
422 | rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt); | |
423 | tmp = combine_cond_expr_cond (loc, code, boolean_type_node, | |
424 | rhs0, op1, !single_use0_p); | |
425 | } | |
75a70cf9 | 426 | } |
427 | /* If that wasn't successful, try the second operand. */ | |
428 | if (tmp == NULL_TREE | |
429 | && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME) | |
430 | { | |
431 | tree op0 = gimple_cond_lhs (stmt); | |
432 | name = gimple_cond_rhs (stmt); | |
433 | def_stmt = get_prop_source_stmt (name, false, &single_use1_p); | |
434 | if (!def_stmt || !can_propagate_from (def_stmt)) | |
435 | return did_something; | |
436 | ||
437 | rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt); | |
389dd41b | 438 | tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0, |
439 | rhs1, !single_use1_p); | |
75a70cf9 | 440 | } |
441 | /* If that wasn't successful either, try both operands. */ | |
442 | if (tmp == NULL_TREE | |
443 | && rhs0 != NULL_TREE | |
444 | && rhs1 != NULL_TREE) | |
389dd41b | 445 | tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0, |
446 | fold_convert_loc (loc, | |
447 | TREE_TYPE (rhs0), | |
448 | rhs1), | |
75a70cf9 | 449 | !(single_use0_p && single_use1_p)); |
450 | } | |
451 | ||
452 | if (tmp) | |
453 | { | |
454 | if (dump_file && tmp) | |
455 | { | |
456 | tree cond = build2 (gimple_cond_code (stmt), | |
457 | boolean_type_node, | |
458 | gimple_cond_lhs (stmt), | |
459 | gimple_cond_rhs (stmt)); | |
460 | fprintf (dump_file, " Replaced '"); | |
461 | print_generic_expr (dump_file, cond, 0); | |
462 | fprintf (dump_file, "' with '"); | |
463 | print_generic_expr (dump_file, tmp, 0); | |
464 | fprintf (dump_file, "'\n"); | |
465 | } | |
466 | ||
467 | gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp)); | |
468 | update_stmt (stmt); | |
469 | ||
470 | /* Remove defining statements. */ | |
5d2361b0 | 471 | if (remove_prop_source_from_use (name) |
472 | || is_gimple_min_invariant (tmp)) | |
75a70cf9 | 473 | did_something = 2; |
474 | else if (did_something == 0) | |
475 | did_something = 1; | |
476 | ||
477 | /* Continue combining. */ | |
478 | continue; | |
479 | } | |
480 | ||
481 | break; | |
482 | } while (1); | |
483 | ||
484 | return did_something; | |
485 | } | |
486 | ||
487 | ||
488 | /* Propagate from the ssa name definition statements of COND_EXPR | |
489 | in the rhs of statement STMT into the conditional if that simplifies it. | |
4c580c8c | 490 | Returns zero if no statement was changed, one if there were |
75a70cf9 | 491 | changes and two if cfg_cleanup needs to run. |
492 | ||
493 | This must be kept in sync with forward_propagate_into_gimple_cond. */ | |
4ee9c684 | 494 | |
4c580c8c | 495 | static int |
75a70cf9 | 496 | forward_propagate_into_cond (gimple_stmt_iterator *gsi_p) |
e6dfde59 | 497 | { |
75a70cf9 | 498 | gimple stmt = gsi_stmt (*gsi_p); |
389dd41b | 499 | location_t loc = gimple_location (stmt); |
4c580c8c | 500 | int did_something = 0; |
d080be9e | 501 | |
5adc1066 | 502 | do { |
503 | tree tmp = NULL_TREE; | |
75a70cf9 | 504 | tree cond = gimple_assign_rhs1 (stmt); |
505 | tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE; | |
506 | gimple def_stmt; | |
f4628d45 | 507 | bool single_use0_p = false, single_use1_p = false; |
5adc1066 | 508 | |
509 | /* We can do tree combining on SSA_NAME and comparison expressions. */ | |
510 | if (COMPARISON_CLASS_P (cond) | |
511 | && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME) | |
512 | { | |
513 | /* For comparisons use the first operand, that is likely to | |
514 | simplify comparisons against constants. */ | |
515 | name = TREE_OPERAND (cond, 0); | |
f4628d45 | 516 | def_stmt = get_prop_source_stmt (name, false, &single_use0_p); |
75a70cf9 | 517 | if (def_stmt && can_propagate_from (def_stmt)) |
5adc1066 | 518 | { |
519 | tree op1 = TREE_OPERAND (cond, 1); | |
75a70cf9 | 520 | rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt); |
389dd41b | 521 | tmp = combine_cond_expr_cond (loc, TREE_CODE (cond), |
522 | boolean_type_node, | |
75a70cf9 | 523 | rhs0, op1, !single_use0_p); |
5adc1066 | 524 | } |
525 | /* If that wasn't successful, try the second operand. */ | |
526 | if (tmp == NULL_TREE | |
527 | && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME) | |
528 | { | |
529 | tree op0 = TREE_OPERAND (cond, 0); | |
530 | name = TREE_OPERAND (cond, 1); | |
f4628d45 | 531 | def_stmt = get_prop_source_stmt (name, false, &single_use1_p); |
75a70cf9 | 532 | if (!def_stmt || !can_propagate_from (def_stmt)) |
d080be9e | 533 | return did_something; |
5adc1066 | 534 | |
75a70cf9 | 535 | rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt); |
389dd41b | 536 | tmp = combine_cond_expr_cond (loc, TREE_CODE (cond), |
537 | boolean_type_node, | |
75a70cf9 | 538 | op0, rhs1, !single_use1_p); |
5adc1066 | 539 | } |
484b827b | 540 | /* If that wasn't successful either, try both operands. */ |
541 | if (tmp == NULL_TREE | |
542 | && rhs0 != NULL_TREE | |
543 | && rhs1 != NULL_TREE) | |
389dd41b | 544 | tmp = combine_cond_expr_cond (loc, TREE_CODE (cond), |
545 | boolean_type_node, | |
546 | rhs0, | |
547 | fold_convert_loc (loc, | |
548 | TREE_TYPE (rhs0), | |
549 | rhs1), | |
f4628d45 | 550 | !(single_use0_p && single_use1_p)); |
5adc1066 | 551 | } |
552 | else if (TREE_CODE (cond) == SSA_NAME) | |
553 | { | |
554 | name = cond; | |
555 | def_stmt = get_prop_source_stmt (name, true, NULL); | |
1084097d | 556 | if (!def_stmt || !can_propagate_from (def_stmt)) |
d080be9e | 557 | return did_something; |
5adc1066 | 558 | |
75a70cf9 | 559 | rhs0 = gimple_assign_rhs1 (def_stmt); |
389dd41b | 560 | tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0, |
484b827b | 561 | build_int_cst (TREE_TYPE (rhs0), 0), |
5adc1066 | 562 | false); |
563 | } | |
564 | ||
565 | if (tmp) | |
566 | { | |
567 | if (dump_file && tmp) | |
568 | { | |
569 | fprintf (dump_file, " Replaced '"); | |
570 | print_generic_expr (dump_file, cond, 0); | |
571 | fprintf (dump_file, "' with '"); | |
572 | print_generic_expr (dump_file, tmp, 0); | |
573 | fprintf (dump_file, "'\n"); | |
574 | } | |
575 | ||
75a70cf9 | 576 | gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp)); |
577 | stmt = gsi_stmt (*gsi_p); | |
5adc1066 | 578 | update_stmt (stmt); |
579 | ||
580 | /* Remove defining statements. */ | |
5d2361b0 | 581 | if (remove_prop_source_from_use (name) |
582 | || is_gimple_min_invariant (tmp)) | |
4c580c8c | 583 | did_something = 2; |
584 | else if (did_something == 0) | |
585 | did_something = 1; | |
d080be9e | 586 | |
5adc1066 | 587 | /* Continue combining. */ |
588 | continue; | |
589 | } | |
590 | ||
591 | break; | |
592 | } while (1); | |
d080be9e | 593 | |
594 | return did_something; | |
4ee9c684 | 595 | } |
596 | ||
48e1416a | 597 | /* We've just substituted an ADDR_EXPR into stmt. Update all the |
148aa112 | 598 | relevant data structures to match. */ |
599 | ||
600 | static void | |
75a70cf9 | 601 | tidy_after_forward_propagate_addr (gimple stmt) |
148aa112 | 602 | { |
148aa112 | 603 | /* We may have turned a trapping insn into a non-trapping insn. */ |
604 | if (maybe_clean_or_replace_eh_stmt (stmt, stmt) | |
75a70cf9 | 605 | && gimple_purge_dead_eh_edges (gimple_bb (stmt))) |
148aa112 | 606 | cfg_changed = true; |
f2fae51f | 607 | |
75a70cf9 | 608 | if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR) |
609 | recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt)); | |
148aa112 | 610 | } |
611 | ||
75a70cf9 | 612 | /* DEF_RHS contains the address of the 0th element in an array. |
6c01267c | 613 | USE_STMT uses type of DEF_RHS to compute the address of an |
291d763b | 614 | arbitrary element within the array. The (variable) byte offset |
615 | of the element is contained in OFFSET. | |
616 | ||
617 | We walk back through the use-def chains of OFFSET to verify that | |
618 | it is indeed computing the offset of an element within the array | |
619 | and extract the index corresponding to the given byte offset. | |
620 | ||
621 | We then try to fold the entire address expression into a form | |
622 | &array[index]. | |
623 | ||
624 | If we are successful, we replace the right hand side of USE_STMT | |
625 | with the new address computation. */ | |
626 | ||
627 | static bool | |
6c01267c | 628 | forward_propagate_addr_into_variable_array_index (tree offset, |
75a70cf9 | 629 | tree def_rhs, |
630 | gimple_stmt_iterator *use_stmt_gsi) | |
291d763b | 631 | { |
401d1fb3 | 632 | tree index, tunit; |
75a70cf9 | 633 | gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi); |
182cf5a9 | 634 | tree new_rhs, tmp; |
401d1fb3 | 635 | |
182cf5a9 | 636 | if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF) |
637 | tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs))); | |
638 | else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE) | |
639 | tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs)))); | |
640 | else | |
641 | return false; | |
401d1fb3 | 642 | if (!host_integerp (tunit, 1)) |
643 | return false; | |
291d763b | 644 | |
65c220cd | 645 | /* Get the offset's defining statement. */ |
646 | offset_def = SSA_NAME_DEF_STMT (offset); | |
647 | ||
648 | /* Try to find an expression for a proper index. This is either a | |
649 | multiplication expression by the element size or just the ssa name we came | |
650 | along in case the element size is one. In that case, however, we do not | |
651 | allow multiplications because they can be computing index to a higher | |
652 | level dimension (PR 37861). */ | |
401d1fb3 | 653 | if (integer_onep (tunit)) |
1a773ec5 | 654 | { |
65c220cd | 655 | if (is_gimple_assign (offset_def) |
656 | && gimple_assign_rhs_code (offset_def) == MULT_EXPR) | |
657 | return false; | |
291d763b | 658 | |
65c220cd | 659 | index = offset; |
660 | } | |
661 | else | |
662 | { | |
0de36bdb | 663 | /* The statement which defines OFFSET before type conversion |
75a70cf9 | 664 | must be a simple GIMPLE_ASSIGN. */ |
65c220cd | 665 | if (!is_gimple_assign (offset_def)) |
1a773ec5 | 666 | return false; |
291d763b | 667 | |
0de36bdb | 668 | /* The RHS of the statement which defines OFFSET must be a |
48e1416a | 669 | multiplication of an object by the size of the array elements. |
0de36bdb | 670 | This implicitly verifies that the size of the array elements |
671 | is constant. */ | |
401d1fb3 | 672 | if (gimple_assign_rhs_code (offset_def) == MULT_EXPR |
673 | && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST | |
674 | && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit)) | |
675 | { | |
676 | /* The first operand to the MULT_EXPR is the desired index. */ | |
677 | index = gimple_assign_rhs1 (offset_def); | |
678 | } | |
679 | /* If we have idx * tunit + CST * tunit re-associate that. */ | |
680 | else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR | |
681 | || gimple_assign_rhs_code (offset_def) == MINUS_EXPR) | |
682 | && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME | |
683 | && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST | |
684 | && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR, | |
685 | gimple_assign_rhs2 (offset_def), | |
686 | tunit)) != NULL_TREE) | |
687 | { | |
688 | gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def)); | |
507b89a4 | 689 | if (is_gimple_assign (offset_def2) |
690 | && gimple_assign_rhs_code (offset_def2) == MULT_EXPR | |
401d1fb3 | 691 | && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST |
692 | && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit)) | |
693 | { | |
694 | index = fold_build2 (gimple_assign_rhs_code (offset_def), | |
695 | TREE_TYPE (offset), | |
696 | gimple_assign_rhs1 (offset_def2), tmp); | |
697 | } | |
698 | else | |
699 | return false; | |
700 | } | |
701 | else | |
1a773ec5 | 702 | return false; |
1a773ec5 | 703 | } |
291d763b | 704 | |
705 | /* Replace the pointer addition with array indexing. */ | |
401d1fb3 | 706 | index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE, |
707 | true, GSI_SAME_STMT); | |
182cf5a9 | 708 | if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF) |
709 | { | |
710 | new_rhs = unshare_expr (def_rhs); | |
711 | TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index; | |
712 | } | |
713 | else | |
714 | { | |
715 | new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))), | |
716 | unshare_expr (TREE_OPERAND (def_rhs, 0)), | |
717 | index, integer_zero_node, NULL_TREE); | |
718 | new_rhs = build_fold_addr_expr (new_rhs); | |
719 | if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)), | |
720 | TREE_TYPE (new_rhs))) | |
721 | { | |
722 | new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, | |
723 | NULL_TREE, true, GSI_SAME_STMT); | |
724 | new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)), | |
725 | new_rhs); | |
726 | } | |
727 | } | |
728 | gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs); | |
75a70cf9 | 729 | use_stmt = gsi_stmt (*use_stmt_gsi); |
291d763b | 730 | |
731 | /* That should have created gimple, so there is no need to | |
732 | record information to undo the propagation. */ | |
148aa112 | 733 | fold_stmt_inplace (use_stmt); |
734 | tidy_after_forward_propagate_addr (use_stmt); | |
291d763b | 735 | return true; |
736 | } | |
737 | ||
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  /* RES tracks whether every dereference of NAME in USE_STMT was
     successfully replaced; it is only cleared in the struct-assignment
     case below where the LHS dereference is deliberately left alone.  */
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up with a conversion needed for a non-invariant
	 address which we cannot do in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && (!is_gimple_min_invariant (def_rhs)
		  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
		      && (TYPE_PRECISION (TREE_TYPE (lhs))
			  > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
	return forward_propagate_addr_expr (lhs, def_rhs);

      /* Single use and the types work out: substitute DEF_RHS directly,
	 inserting a conversion if the types merely differ in variant.  */
      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
					new_def_rhs, NULL_TREE);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  /* Fold the constant offsets of the two MEM_REFs together and
	     rewrite the LHS MEM_REF operands in place.  */
	  double_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off = double_int_add (off,
				shwi_to_double_int (def_rhs_offset));
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off = double_int_add (off, mem_ref_offset (def_rhs_base));
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
	       && useless_type_conversion_p
		    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
		     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
	{
	  /* Temporarily graft a rebased MEM_REF into DEF_RHS so that the
	     unshared copy picks it up, then restore DEF_RHS afterwards.  */
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset
		= int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
				   TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  gimple_assign_set_lhs (use_stmt,
				 unshare_expr (TREE_OPERAND (def_rhs, 0)));
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.
     This mirrors the LHS handling above.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  double_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off = double_int_add (off,
				shwi_to_double_int (def_rhs_offset));
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off = double_int_add (off, mem_ref_offset (def_rhs_base));
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
	       && useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
		     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset
		= int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
				   TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  gimple_assign_set_rhs1 (use_stmt,
				  unshare_expr (TREE_OPERAND (def_rhs, 0)));
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
					       TREE_TYPE (def_rhs),
					       def_rhs, rhs2);
      if (new_rhs)
	{
	  tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
	  new_rhs = unshare_expr (new_rhs);
	  if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
	    {
	      /* A non-invariant folded address must be gimplified into
		 a separate statement before it can be converted.  */
	      if (!is_gimple_min_invariant (new_rhs))
		new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
						    true, NULL_TREE,
						    true, GSI_SAME_STMT);
	      new_rhs = fold_convert (type, new_rhs);
	    }
	  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
	  use_stmt = gsi_stmt (*use_stmt_gsi);
	  update_stmt (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
	  || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}
1045 | ||
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  /* ALL stays true only if propagation succeeded into every use.  */
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  /* Debug statements do not count as failed propagations.  */
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate non-invariant ADDR_EXPRs into the loop as that
	 is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
	  && !is_gimple_min_invariant (rhs))
	{
	  all = false;
	  continue;
	}

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* If the use has moved to a different statement adjust
	   the update machinery for the old statement too.  */
	if (use_stmt != gsi_stmt (gsi))
	  {
	    update_stmt (use_stmt);
	    use_stmt = gsi_stmt (gsi);
	  }

	update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}
1117 | ||
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	     == tcc_comparison
	  || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
	{
	  /* Avoid using fold here as that may create a COND_EXPR with
	     non-boolean condition as canonical form.  */
	  tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
			gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
	}
      /* We can propagate the condition into X op CST where op
	 is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	       == tcc_comparison
	       && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
	{
	  enum tree_code code = gimple_assign_rhs_code (use_stmt);
	  tree cst = gimple_assign_rhs2 (use_stmt);
	  tree cond;

	  cond = build2 (gimple_assign_rhs_code (stmt),
			 TREE_TYPE (cst),
			 gimple_assign_rhs1 (stmt),
			 gimple_assign_rhs2 (stmt));

	  /* Let combine_cond_expr_cond decide whether (x CMP y) op CST
	     folds to something usable; bail out if it does not.  */
	  tmp = combine_cond_expr_cond (gimple_location (use_stmt),
					code, TREE_TYPE (lhs),
					cond, cst, false);
	  if (tmp == NULL_TREE)
	    return false;
	}
      /* We can propagate the condition into a statement that
	 computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  /* With NaNs some comparisons have no valid inversion;
	     invert_tree_comparison signals that with ERROR_MARK.  */
	  bool nans = HONOR_NANS (TYPE_MODE (type));
	  enum tree_code code;
	  code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
	  if (code == ERROR_MARK)
	    return false;

	  tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
	}
      else
	return false;

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
	use_stmt = gsi_stmt (gsi);
	update_stmt (use_stmt);
      }

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
				      stmt);
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, old_rhs, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      /* Remove defining statements.  */
      return remove_prop_source_from_use (name);
    }

  return false;
}
1225 | ||
3a938499 | 1226 | /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y. |
1227 | If so, we can change STMT into lhs = y which can later be copy | |
48e1416a | 1228 | propagated. Similarly for negation. |
3a938499 | 1229 | |
48e1416a | 1230 | This could trivially be formulated as a forward propagation |
3a938499 | 1231 | to immediate uses. However, we already had an implementation |
1232 | from DOM which used backward propagation via the use-def links. | |
1233 | ||
1234 | It turns out that backward propagation is actually faster as | |
1235 | there's less work to do for each NOT/NEG expression we find. | |
1236 | Backwards propagation needs to look at the statement in a single | |
1237 | backlink. Forward propagation needs to look at potentially more | |
1238 | than one forward link. */ | |
1239 | ||
1240 | static void | |
75a70cf9 | 1241 | simplify_not_neg_expr (gimple_stmt_iterator *gsi_p) |
3a938499 | 1242 | { |
75a70cf9 | 1243 | gimple stmt = gsi_stmt (*gsi_p); |
1244 | tree rhs = gimple_assign_rhs1 (stmt); | |
1245 | gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs); | |
3a938499 | 1246 | |
1247 | /* See if the RHS_DEF_STMT has the same form as our statement. */ | |
75a70cf9 | 1248 | if (is_gimple_assign (rhs_def_stmt) |
1249 | && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt)) | |
3a938499 | 1250 | { |
75a70cf9 | 1251 | tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt); |
3a938499 | 1252 | |
1253 | /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */ | |
1254 | if (TREE_CODE (rhs_def_operand) == SSA_NAME | |
1255 | && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand)) | |
1256 | { | |
75a70cf9 | 1257 | gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand); |
1258 | stmt = gsi_stmt (*gsi_p); | |
3a938499 | 1259 | update_stmt (stmt); |
1260 | } | |
1261 | } | |
1262 | } | |
3d5cfe81 | 1263 | |
b5860aba | 1264 | /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of |
1265 | the condition which we may be able to optimize better. */ | |
1266 | ||
1267 | static void | |
75a70cf9 | 1268 | simplify_gimple_switch (gimple stmt) |
b5860aba | 1269 | { |
75a70cf9 | 1270 | tree cond = gimple_switch_index (stmt); |
b5860aba | 1271 | tree def, to, ti; |
75a70cf9 | 1272 | gimple def_stmt; |
b5860aba | 1273 | |
1274 | /* The optimization that we really care about is removing unnecessary | |
1275 | casts. That will let us do much better in propagating the inferred | |
1276 | constant at the switch target. */ | |
1277 | if (TREE_CODE (cond) == SSA_NAME) | |
1278 | { | |
75a70cf9 | 1279 | def_stmt = SSA_NAME_DEF_STMT (cond); |
1280 | if (is_gimple_assign (def_stmt)) | |
b5860aba | 1281 | { |
75a70cf9 | 1282 | if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR) |
b5860aba | 1283 | { |
1284 | int need_precision; | |
1285 | bool fail; | |
1286 | ||
75a70cf9 | 1287 | def = gimple_assign_rhs1 (def_stmt); |
b5860aba | 1288 | |
b5860aba | 1289 | /* ??? Why was Jeff testing this? We are gimple... */ |
1b4345f7 | 1290 | gcc_checking_assert (is_gimple_val (def)); |
b5860aba | 1291 | |
1292 | to = TREE_TYPE (cond); | |
1293 | ti = TREE_TYPE (def); | |
1294 | ||
1295 | /* If we have an extension that preserves value, then we | |
1296 | can copy the source value into the switch. */ | |
1297 | ||
1298 | need_precision = TYPE_PRECISION (ti); | |
1299 | fail = false; | |
c5237b8b | 1300 | if (! INTEGRAL_TYPE_P (ti)) |
1301 | fail = true; | |
1302 | else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti)) | |
b5860aba | 1303 | fail = true; |
1304 | else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti)) | |
1305 | need_precision += 1; | |
1306 | if (TYPE_PRECISION (to) < need_precision) | |
1307 | fail = true; | |
1308 | ||
1309 | if (!fail) | |
1310 | { | |
75a70cf9 | 1311 | gimple_switch_set_index (stmt, def); |
b5860aba | 1312 | update_stmt (stmt); |
1313 | } | |
1314 | } | |
1315 | } | |
1316 | } | |
1317 | } | |
1318 | ||
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  /* For each pointer we record the chain of bases walked through
     (EXPS) together with the accumulated byte offset at each step
     (OFFS); a common entry in both tables yields the difference.  */
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      /* &MEM[ssa + C] continues the walk at SSA with the
		 MEM_REF offset folded into OFF; any other base is a
		 terminal entry.  */
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    double_int_to_tree (sizetype,
							mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  /* Record this SSA name as a potential common base before
	     following its definition further.  */
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      /* Only constant pointer adjustments keep the difference
		 computable.  */
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || code == NOP_EXPR)
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  /* A base reached from both pointers pins down the difference as
     the difference of the accumulated offsets.  */
  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
1406 | ||
1407 | /* *GSI_P is a GIMPLE_CALL to a builtin function. | |
1408 | Optimize | |
1409 | memcpy (p, "abcd", 4); | |
1410 | memset (p + 4, ' ', 3); | |
1411 | into | |
1412 | memcpy (p, "abcd ", 7); | |
1413 | call if the latter can be stored by pieces during expansion. */ | |
1414 | ||
1415 | static bool | |
1416 | simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2) | |
1417 | { | |
1418 | gimple stmt1, stmt2 = gsi_stmt (*gsi_p); | |
1419 | tree vuse = gimple_vuse (stmt2); | |
1420 | if (vuse == NULL) | |
1421 | return false; | |
1422 | stmt1 = SSA_NAME_DEF_STMT (vuse); | |
1423 | ||
1424 | switch (DECL_FUNCTION_CODE (callee2)) | |
1425 | { | |
1426 | case BUILT_IN_MEMSET: | |
1427 | if (gimple_call_num_args (stmt2) != 3 | |
1428 | || gimple_call_lhs (stmt2) | |
1429 | || CHAR_BIT != 8 | |
1430 | || BITS_PER_UNIT != 8) | |
1431 | break; | |
1432 | else | |
1433 | { | |
1434 | tree callee1; | |
1435 | tree ptr1, src1, str1, off1, len1, lhs1; | |
1436 | tree ptr2 = gimple_call_arg (stmt2, 0); | |
1437 | tree val2 = gimple_call_arg (stmt2, 1); | |
1438 | tree len2 = gimple_call_arg (stmt2, 2); | |
1439 | tree diff, vdef, new_str_cst; | |
1440 | gimple use_stmt; | |
1441 | unsigned int ptr1_align; | |
1442 | unsigned HOST_WIDE_INT src_len; | |
1443 | char *src_buf; | |
1444 | use_operand_p use_p; | |
1445 | ||
1446 | if (!host_integerp (val2, 0) | |
1447 | || !host_integerp (len2, 1)) | |
1448 | break; | |
1449 | if (is_gimple_call (stmt1)) | |
1450 | { | |
1451 | /* If first stmt is a call, it needs to be memcpy | |
1452 | or mempcpy, with string literal as second argument and | |
1453 | constant length. */ | |
1454 | callee1 = gimple_call_fndecl (stmt1); | |
1455 | if (callee1 == NULL_TREE | |
1456 | || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL | |
1457 | || gimple_call_num_args (stmt1) != 3) | |
1458 | break; | |
1459 | if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY | |
1460 | && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY) | |
1461 | break; | |
1462 | ptr1 = gimple_call_arg (stmt1, 0); | |
1463 | src1 = gimple_call_arg (stmt1, 1); | |
1464 | len1 = gimple_call_arg (stmt1, 2); | |
1465 | lhs1 = gimple_call_lhs (stmt1); | |
1466 | if (!host_integerp (len1, 1)) | |
1467 | break; | |
1468 | str1 = string_constant (src1, &off1); | |
1469 | if (str1 == NULL_TREE) | |
1470 | break; | |
1471 | if (!host_integerp (off1, 1) | |
1472 | || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0 | |
1473 | || compare_tree_int (len1, TREE_STRING_LENGTH (str1) | |
1474 | - tree_low_cst (off1, 1)) > 0 | |
1475 | || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE | |
1476 | || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1))) | |
1477 | != TYPE_MODE (char_type_node)) | |
1478 | break; | |
1479 | } | |
1480 | else if (gimple_assign_single_p (stmt1)) | |
1481 | { | |
1482 | /* Otherwise look for length 1 memcpy optimized into | |
1483 | assignment. */ | |
1484 | ptr1 = gimple_assign_lhs (stmt1); | |
1485 | src1 = gimple_assign_rhs1 (stmt1); | |
1486 | if (TREE_CODE (ptr1) != MEM_REF | |
1487 | || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node) | |
1488 | || !host_integerp (src1, 0)) | |
1489 | break; | |
1490 | ptr1 = build_fold_addr_expr (ptr1); | |
1491 | callee1 = NULL_TREE; | |
1492 | len1 = size_one_node; | |
1493 | lhs1 = NULL_TREE; | |
1494 | off1 = size_zero_node; | |
1495 | str1 = NULL_TREE; | |
1496 | } | |
1497 | else | |
1498 | break; | |
1499 | ||
1500 | diff = constant_pointer_difference (ptr1, ptr2); | |
1501 | if (diff == NULL && lhs1 != NULL) | |
1502 | { | |
1503 | diff = constant_pointer_difference (lhs1, ptr2); | |
1504 | if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY | |
1505 | && diff != NULL) | |
1506 | diff = size_binop (PLUS_EXPR, diff, | |
1507 | fold_convert (sizetype, len1)); | |
1508 | } | |
1509 | /* If the difference between the second and first destination pointer | |
1510 | is not constant, or is bigger than memcpy length, bail out. */ | |
1511 | if (diff == NULL | |
1512 | || !host_integerp (diff, 1) | |
1513 | || tree_int_cst_lt (len1, diff)) | |
1514 | break; | |
1515 | ||
1516 | /* Use maximum of difference plus memset length and memcpy length | |
1517 | as the new memcpy length, if it is too big, bail out. */ | |
1518 | src_len = tree_low_cst (diff, 1); | |
1519 | src_len += tree_low_cst (len2, 1); | |
1520 | if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1)) | |
1521 | src_len = tree_low_cst (len1, 1); | |
1522 | if (src_len > 1024) | |
1523 | break; | |
1524 | ||
1525 | /* If mempcpy value is used elsewhere, bail out, as mempcpy | |
1526 | with bigger length will return different result. */ | |
1527 | if (lhs1 != NULL_TREE | |
1528 | && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY | |
1529 | && (TREE_CODE (lhs1) != SSA_NAME | |
1530 | || !single_imm_use (lhs1, &use_p, &use_stmt) | |
1531 | || use_stmt != stmt2)) | |
1532 | break; | |
1533 | ||
1534 | /* If anything reads memory in between memcpy and memset | |
1535 | call, the modified memcpy call might change it. */ | |
1536 | vdef = gimple_vdef (stmt1); | |
1537 | if (vdef != NULL | |
1538 | && (!single_imm_use (vdef, &use_p, &use_stmt) | |
1539 | || use_stmt != stmt2)) | |
1540 | break; | |
1541 | ||
1542 | ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT); | |
1543 | /* Construct the new source string literal. */ | |
1544 | src_buf = XALLOCAVEC (char, src_len + 1); | |
1545 | if (callee1) | |
1546 | memcpy (src_buf, | |
1547 | TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1), | |
1548 | tree_low_cst (len1, 1)); | |
1549 | else | |
1550 | src_buf[0] = tree_low_cst (src1, 0); | |
1551 | memset (src_buf + tree_low_cst (diff, 1), | |
1552 | tree_low_cst (val2, 1), tree_low_cst (len2, 1)); | |
1553 | src_buf[src_len] = '\0'; | |
1554 | /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str | |
1555 | handle embedded '\0's. */ | |
1556 | if (strlen (src_buf) != src_len) | |
1557 | break; | |
1558 | rtl_profile_for_bb (gimple_bb (stmt2)); | |
1559 | /* If the new memcpy wouldn't be emitted by storing the literal | |
1560 | by pieces, this optimization might enlarge .rodata too much, | |
1561 | as commonly used string literals couldn't be shared any | |
1562 | longer. */ | |
1563 | if (!can_store_by_pieces (src_len, | |
1564 | builtin_strncpy_read_str, | |
1565 | src_buf, ptr1_align, false)) | |
1566 | break; | |
1567 | ||
1568 | new_str_cst = build_string_literal (src_len, src_buf); | |
1569 | if (callee1) | |
1570 | { | |
1571 | /* If STMT1 is a mem{,p}cpy call, adjust it and remove | |
1572 | memset call. */ | |
1573 | if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY) | |
1574 | gimple_call_set_lhs (stmt1, NULL_TREE); | |
1575 | gimple_call_set_arg (stmt1, 1, new_str_cst); | |
1576 | gimple_call_set_arg (stmt1, 2, | |
1577 | build_int_cst (TREE_TYPE (len1), src_len)); | |
1578 | update_stmt (stmt1); | |
1579 | unlink_stmt_vdef (stmt2); | |
1580 | gsi_remove (gsi_p, true); | |
1581 | release_defs (stmt2); | |
1582 | if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY) | |
1583 | release_ssa_name (lhs1); | |
1584 | return true; | |
1585 | } | |
1586 | else | |
1587 | { | |
1588 | /* Otherwise, if STMT1 is length 1 memcpy optimized into | |
1589 | assignment, remove STMT1 and change memset call into | |
1590 | memcpy call. */ | |
1591 | gimple_stmt_iterator gsi = gsi_for_stmt (stmt1); | |
1592 | ||
7ecb2e7c | 1593 | if (!is_gimple_val (ptr1)) |
1594 | ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE, | |
1595 | true, GSI_SAME_STMT); | |
27f931ff | 1596 | gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]); |
1597 | gimple_call_set_arg (stmt2, 0, ptr1); | |
1598 | gimple_call_set_arg (stmt2, 1, new_str_cst); | |
1599 | gimple_call_set_arg (stmt2, 2, | |
1600 | build_int_cst (TREE_TYPE (len2), src_len)); | |
1601 | unlink_stmt_vdef (stmt1); | |
1602 | gsi_remove (&gsi, true); | |
1603 | release_defs (stmt1); | |
1604 | update_stmt (stmt2); | |
1605 | return false; | |
1606 | } | |
1607 | } | |
1608 | break; | |
1609 | default: | |
1610 | break; | |
1611 | } | |
1612 | return false; | |
1613 | } | |
1614 | ||
/* Simplify bitwise binary operations.
   GSI points to a GIMPLE_ASSIGN whose rhs code is BIT_AND_EXPR,
   BIT_IOR_EXPR or BIT_XOR_EXPR.  Each transform below inspects the
   defining statements of the operands and tries to combine them with
   the statement at GSI, rewriting it in place.
   Return true if a transformation applied, otherwise return false.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  gimple def1 = NULL, def2 = NULL;
  tree def1_arg1, def2_arg1;
  enum tree_code def1_code, def2_code;

  /* If the first argument is an SSA name that is itself a result of a
     typecast of an ADDR_EXPR to an integer, feed the ADDR_EXPR to the
     folder rather than the ssa name.  */
  if (code == BIT_AND_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (arg1) == SSA_NAME)
    {
      gimple def = SSA_NAME_DEF_STMT (arg1);
      tree op = arg1;

      /* ??? This looks bogus - the conversion could be truncating.  */
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	{
	  tree opp = gimple_assign_rhs1 (def);
	  if (TREE_CODE (opp) == ADDR_EXPR)
	    op = opp;
	}

      res = fold_binary_loc (gimple_location (stmt),
			     BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
			     op, arg2);
      /* Only substitute when folding produced a constant; otherwise
	 keep the statement untouched.  */
      if (res && is_gimple_min_invariant (res))
	{
	  gimple_assign_set_rhs_from_tree (gsi, res);
	  update_stmt (stmt);
	  return true;
	}
    }

  /* Harvest the defining statement of ARG1: if it is an SSA name defined
     by an assignment, remember that assignment's rhs code and first
     operand, otherwise fall back to the operand's own tree code.  */
  def1_code = TREE_CODE (arg1);
  def1_arg1 = arg1;
  if (TREE_CODE (arg1) == SSA_NAME)
    {
      def1 = SSA_NAME_DEF_STMT (arg1);
      if (is_gimple_assign (def1))
	{
	  def1_code = gimple_assign_rhs_code (def1);
	  def1_arg1 = gimple_assign_rhs1 (def1);
	}
    }

  /* Likewise for ARG2.  */
  def2_code = TREE_CODE (arg2);
  def2_arg1 = arg2;
  if (TREE_CODE (arg2) == SSA_NAME)
    {
      def2 = SSA_NAME_DEF_STMT (arg2);
      if (is_gimple_assign (def2))
	{
	  def2_code = gimple_assign_rhs_code (def2);
	  def2_arg1 = gimple_assign_rhs1 (def2);
	}
    }

  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     to combine successive conversions and bitwise binary operations.
     I.e. (T)a op (T)b becomes (T)(a op b), performed by inserting the
     narrow operation before STMT and turning STMT into the conversion.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      /* Make sure that the conversion widens the operands or that it
	 changes the operation to a bitfield precision.  */
      && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
	   < TYPE_PRECISION (TREE_TYPE (arg1)))
	  || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
	      != MODE_INT)
	  || (TYPE_PRECISION (TREE_TYPE (arg1))
	      != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
    {
      gimple newop;
      tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
				 NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
			      arg2, gimple_assign_rhs2 (def1));
      tree tem;
      gimple newop;
      /* If CST1 & CST2 is zero the whole thing degenerates to a & CST2;
	 just replace the first operand with A.  */
      if (integer_zerop (cst))
	{
	  gimple_assign_set_rhs1 (stmt, def1_arg1);
	  update_stmt (stmt);
	  return true;
	}
      tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
					    tem, def1_arg1, arg2);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.
     (a op CST1) op CST2 -> a op (CST1 op CST2) for op in {&,|,^}.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
			      arg2, gimple_assign_rhs2 (def1));
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  return false;
}
1761 | ||
ca3c9092 | 1762 | |
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed (folding made
   a potentially-throwing statement non-throwing and dead EH edges were
   purged).  */

static bool
associate_plusminus (gimple stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  gimple_stmt_iterator gsi;
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  Iterate until no further negate can be
     absorbed into the operation code.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A + CST) +- CST   ->  A + CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */

  gsi = gsi_for_stmt (stmt);
  /* Patterns keyed on the defining statement of RHS1.  */
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST
		       && def_code == PLUS_EXPR)
		{
		  /* (A + CST) +- CST -> A + CST.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = PLUS_EXPR;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (code == PLUS_EXPR
		       && integer_onep (rhs1))
		{
		  /* ~A + 1 -> -A.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

  /* Mirror patterns keyed on the defining statement of RHS2; RHS2 may
     have been cleared by a transform above, hence the NULL check.  */
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST)
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2),
					  rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

out:
  /* If any transform fired, fold and update the statement; purging
     dead EH edges after folding is what can change the CFG.  */
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (stmt);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	return true;
    }

  return false;
}
2047 | ||
/* Combine two conversions in a row for the second conversion at *GSI.
   The statement at *GSI must be a CONVERT_EXPR/NOP_EXPR, FLOAT_EXPR or
   FIX_TRUNC_EXPR assignment; if its operand is itself the result of a
   conversion, try to elide the intermediate conversion following the
   same rules as fold-const's two-conversions logic.
   Returns true if there were any changes made.  */

static bool
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  /* A conversion to a compatible type is a no-op; turn the statement
     into a plain copy.  */
  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return true;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return false;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      /* Classify the three types involved: innermost (before both
	 conversions), intermediate, and final.  */
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  return true;
	}

      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
	   || (inter_float && inside_float)
	   || (inter_vec && inside_vec))
	  && inter_prec >= inside_prec
	  && (inter_float || inter_vec
	      || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type))
	  && ! final_ptr
	  && (! final_vec || inter_prec == inside_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return true;
	}

      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  */
      if (inside_int && inter_int && final_int
	  && inside_prec < inter_prec && inter_prec < final_prec
	  && inside_unsignedp && !inter_unsignedp)
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return true;
	}

      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return true;
	}

      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     double_int_to_tree
			       (inside_type, double_int_mask (inter_prec)));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return true;
	}
    }

  return false;
}
2207 | ||
/* Main entry point for the forward propagation optimizer.
   Walks every statement of every basic block and dispatches to the
   per-pattern simplifiers above.  Returns the TODO flags for the pass
   manager (unused-local removal, CFG cleanup).

   Iterator discipline: arms that remove the current statement leave GSI
   pointing at the next one, so they must NOT also call gsi_next; arms
   that keep the statement advance GSI explicitly.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code code = gimple_assign_rhs_code (stmt);

	      /* Nothing to propagate from statements without an SSA lhs
		 or whose lhs is never used.  */
	      if (TREE_CODE (lhs) != SSA_NAME
		  || has_zero_uses (lhs))
		{
		  gsi_next (&gsi);
		  continue;
		}

	      if (code == ADDR_EXPR
		  /* Handle pointer conversions on invariant addresses
		     as well, as this is valid gimple.  */
		  || (CONVERT_EXPR_CODE_P (code)
		      && TREE_CODE (rhs) == ADDR_EXPR
		      && POINTER_TYPE_P (TREE_TYPE (lhs))))
		{
		  tree base = get_base_address (TREE_OPERAND (rhs, 0));
		  if ((!base
		       || !DECL_P (base)
		       || decl_address_invariant_p (base))
		      && !stmt_references_abnormal_ssa_name (stmt)
		      && forward_propagate_addr_expr (lhs, rhs))
		    {
		      /* All uses were replaced; the definition is dead.  */
		      release_defs (stmt);
		      todoflags |= TODO_remove_unused_locals;
		      gsi_remove (&gsi, true);
		    }
		  else
		    gsi_next (&gsi);
		}
	      else if (code == POINTER_PLUS_EXPR
		       && can_propagate_from (stmt))
		{
		  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
		      /* ??? Better adjust the interface to that function
			 instead of building new trees here.  */
		      && forward_propagate_addr_expr
			   (lhs,
			    build1 (ADDR_EXPR,
				    TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert
						   (ptr_type_node,
						    gimple_assign_rhs2 (stmt))))))
		    {
		      release_defs (stmt);
		      todoflags |= TODO_remove_unused_locals;
		      gsi_remove (&gsi, true);
		    }
		  else if (is_gimple_min_invariant (rhs))
		    {
		      /* Make sure to fold &a[0] + off_1 here.  */
		      fold_stmt_inplace (stmt);
		      update_stmt (stmt);
		      /* If folding changed the rhs code, revisit the
			 statement under its new form; otherwise move on.  */
		      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
			gsi_next (&gsi);
		    }
		  else
		    gsi_next (&gsi);
		}
	      else if ((code == BIT_NOT_EXPR
			|| code == NEGATE_EXPR)
		       && TREE_CODE (rhs) == SSA_NAME)
		{
		  simplify_not_neg_expr (&gsi);
		  gsi_next (&gsi);
		}
	      else if (code == COND_EXPR)
		{
		  /* In this case the entire COND_EXPR is in rhs1. */
		  int did_something;
		  fold_defer_overflow_warnings ();
		  did_something = forward_propagate_into_cond (&gsi);
		  stmt = gsi_stmt (gsi);
		  /* Return value 2 means a condition became constant and
		     an edge may have died.  */
		  if (did_something == 2)
		    cfg_changed = true;
		  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
		    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
		  gsi_next (&gsi);
		}
	      else if (TREE_CODE_CLASS (code) == tcc_comparison)
		{
		  if (forward_propagate_comparison (stmt))
		    cfg_changed = true;
		  gsi_next (&gsi);
		}
	      else if (code == BIT_AND_EXPR
		       || code == BIT_IOR_EXPR
		       || code == BIT_XOR_EXPR)
		{
		  if (!simplify_bitwise_binary (&gsi))
		    gsi_next (&gsi);
		}
	      else if (code == PLUS_EXPR
		       || code == MINUS_EXPR)
		{
		  cfg_changed |= associate_plusminus (stmt);
		  gsi_next (&gsi);
		}
	      else if (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR)
		{
		  if (!combine_conversions (&gsi))
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (gimple_code (stmt) == GIMPLE_SWITCH)
	    {
	      simplify_gimple_switch (stmt);
	      gsi_next (&gsi);
	    }
	  else if (gimple_code (stmt) == GIMPLE_COND)
	    {
	      int did_something;
	      fold_defer_overflow_warnings ();
	      did_something = forward_propagate_into_gimple_cond (stmt);
	      if (did_something == 2)
		cfg_changed = true;
	      fold_undefer_overflow_warnings (did_something, stmt,
					      WARN_STRICT_OVERFLOW_CONDITIONAL);
	      gsi_next (&gsi);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      /* Only normal built-ins are candidates for call
		 simplification.  */
	      tree callee = gimple_call_fndecl (stmt);
	      if (callee == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
		  || !simplify_builtin_call (&gsi, callee))
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}
2377 | ||
2378 | ||
2379 | static bool | |
2380 | gate_forwprop (void) | |
2381 | { | |
408c3c77 | 2382 | return flag_tree_forwprop; |
4ee9c684 | 2383 | } |
2384 | ||
/* Pass descriptor registered with the pass manager.  The gate and
   execute callbacks are defined above; the pass requires the CFG and
   SSA form to be in place, and asks for an SSA update plus SSA
   verification (and a function dump, when requested) after it runs.  */
struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};
37361b38 | 2406 |