]>
Commit | Line | Data |
---|---|---|
18d08014 | 1 | /* Combining of if-expressions on trees. |
23a5b65a | 2 | Copyright (C) 2007-2014 Free Software Foundation, Inc. |
18d08014 RG |
3 | Contributed by Richard Guenther <rguenther@suse.de> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 9 | the Free Software Foundation; either version 3, or (at your option) |
18d08014 RG |
10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
18d08014 RG |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
5d2a9da9 AP |
25 | /* rtl is needed only because arm back-end requires it for |
26 | BRANCH_COST. */ | |
27 | #include "rtl.h" | |
28 | #include "tm_p.h" | |
18d08014 | 29 | #include "tree.h" |
d8a2d370 | 30 | #include "stor-layout.h" |
18d08014 | 31 | #include "basic-block.h" |
cf835838 | 32 | #include "tree-pretty-print.h" |
2fb9a547 AM |
33 | #include "tree-ssa-alias.h" |
34 | #include "internal-fn.h" | |
35 | #include "gimple-fold.h" | |
36 | #include "gimple-expr.h" | |
37 | #include "is-a.h" | |
18f429e2 | 38 | #include "gimple.h" |
5be5c238 | 39 | #include "gimple-iterator.h" |
18f429e2 | 40 | #include "gimplify-me.h" |
442b4905 AM |
41 | #include "gimple-ssa.h" |
42 | #include "tree-cfg.h" | |
43 | #include "tree-phinodes.h" | |
44 | #include "ssa-iterators.h" | |
18d08014 | 45 | #include "tree-pass.h" |
18d08014 | 46 | |
5d2a9da9 AP |
/* Fallback definition used when the target does not provide its own
   LOGICAL_OP_NON_SHORT_CIRCUIT: combining two conditional branches into
   one logical operation is considered profitable when the target's
   BRANCH_COST for the current function is at least 2.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
52 | ||
18d08014 RG |
53 | /* This pass combines COND_EXPRs to simplify control flow. It |
54 | currently recognizes bit tests and comparisons in chains that | |
55 | represent logical and or logical or of two COND_EXPRs. | |
56 | ||
57 | It does so by walking basic blocks in an approximate reverse |
58 | post-dominator order and trying to match CFG patterns that | |
59 | represent logical and or logical or of two COND_EXPRs. | |
60 | Transformations are done if the COND_EXPR conditions match | |
61 | either | |
62 | ||
63 | 1. two single bit tests X & (1 << Yn) (for logical and) | |
64 | ||
65 | 2. two bit tests X & Yn (for logical or) | |
66 | ||
67 | 3. two comparisons X OPn Y (for logical or) | |
68 | ||
69 | To simplify this pass, removing basic blocks and dead code | |
70 | is left to CFG cleanup and DCE. */ | |
71 | ||
72 | ||
73 | /* Recognize a if-then-else CFG pattern starting to match with the | |
74 | COND_BB basic-block containing the COND_EXPR. The recognized | |
75 | then end else blocks are stored to *THEN_BB and *ELSE_BB. If | |
76 | *THEN_BB and/or *ELSE_BB are already set, they are required to | |
77 | match the then and else basic-blocks to make the pattern match. | |
78 | Returns true if the pattern matched, false otherwise. */ | |
79 | ||
80 | static bool | |
81 | recognize_if_then_else (basic_block cond_bb, | |
82 | basic_block *then_bb, basic_block *else_bb) | |
83 | { | |
84 | edge t, e; | |
85 | ||
86 | if (EDGE_COUNT (cond_bb->succs) != 2) | |
87 | return false; | |
88 | ||
89 | /* Find the then/else edges. */ | |
90 | t = EDGE_SUCC (cond_bb, 0); | |
91 | e = EDGE_SUCC (cond_bb, 1); | |
92 | if (!(t->flags & EDGE_TRUE_VALUE)) | |
93 | { | |
94 | edge tmp = t; | |
95 | t = e; | |
96 | e = tmp; | |
97 | } | |
98 | if (!(t->flags & EDGE_TRUE_VALUE) | |
99 | || !(e->flags & EDGE_FALSE_VALUE)) | |
100 | return false; | |
101 | ||
102 | /* Check if the edge destinations point to the required block. */ | |
103 | if (*then_bb | |
104 | && t->dest != *then_bb) | |
105 | return false; | |
106 | if (*else_bb | |
107 | && e->dest != *else_bb) | |
108 | return false; | |
109 | ||
110 | if (!*then_bb) | |
111 | *then_bb = t->dest; | |
112 | if (!*else_bb) | |
113 | *else_bb = e->dest; | |
114 | ||
115 | return true; | |
116 | } | |
117 | ||
118 | /* Verify if the basic block BB does not have side-effects. Return | |
119 | true in this case, else false. */ | |
120 | ||
121 | static bool | |
122 | bb_no_side_effects_p (basic_block bb) | |
123 | { | |
726a989a | 124 | gimple_stmt_iterator gsi; |
18d08014 | 125 | |
726a989a | 126 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
18d08014 | 127 | { |
726a989a | 128 | gimple stmt = gsi_stmt (gsi); |
18d08014 | 129 | |
597c6315 RB |
130 | if (is_gimple_debug (stmt)) |
131 | continue; | |
132 | ||
179184e3 | 133 | if (gimple_has_side_effects (stmt) |
597c6315 | 134 | || gimple_could_trap_p (stmt) |
5006671f | 135 | || gimple_vuse (stmt)) |
18d08014 RG |
136 | return false; |
137 | } | |
138 | ||
139 | return true; | |
140 | } | |
141 | ||
bf4787b2 JJ |
142 | /* Return true if BB is an empty forwarder block to TO_BB. */ |
143 | ||
144 | static bool | |
145 | forwarder_block_to (basic_block bb, basic_block to_bb) | |
146 | { | |
147 | return empty_block_p (bb) | |
148 | && single_succ_p (bb) | |
149 | && single_succ (bb) == to_bb; | |
150 | } | |
151 | ||
18d08014 RG |
152 | /* Verify if all PHI node arguments in DEST for edges from BB1 or |
153 | BB2 to DEST are the same. This makes the CFG merge point | |
154 | free from side-effects. Return true in this case, else false. */ | |
155 | ||
156 | static bool | |
157 | same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest) | |
158 | { | |
159 | edge e1 = find_edge (bb1, dest); | |
160 | edge e2 = find_edge (bb2, dest); | |
726a989a RB |
161 | gimple_stmt_iterator gsi; |
162 | gimple phi; | |
18d08014 | 163 | |
726a989a RB |
164 | for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
165 | { | |
166 | phi = gsi_stmt (gsi); | |
167 | if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1), | |
168 | PHI_ARG_DEF_FROM_EDGE (phi, e2), 0)) | |
169 | return false; | |
170 | } | |
18d08014 RG |
171 | |
172 | return true; | |
173 | } | |
174 | ||
d7b339dd RG |
175 | /* Return the best representative SSA name for CANDIDATE which is used |
176 | in a bit test. */ | |
177 | ||
178 | static tree | |
179 | get_name_for_bit_test (tree candidate) | |
180 | { | |
181 | /* Skip single-use names in favor of using the name from a | |
182 | non-widening conversion definition. */ | |
183 | if (TREE_CODE (candidate) == SSA_NAME | |
184 | && has_single_use (candidate)) | |
185 | { | |
726a989a RB |
186 | gimple def_stmt = SSA_NAME_DEF_STMT (candidate); |
187 | if (is_gimple_assign (def_stmt) | |
a6450905 | 188 | && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))) |
d7b339dd | 189 | { |
726a989a RB |
190 | if (TYPE_PRECISION (TREE_TYPE (candidate)) |
191 | <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))) | |
192 | return gimple_assign_rhs1 (def_stmt); | |
d7b339dd RG |
193 | } |
194 | } | |
195 | ||
196 | return candidate; | |
197 | } | |
198 | ||
726a989a | 199 | /* Recognize a single bit test pattern in GIMPLE_COND and its defining |
18d08014 | 200 | statements. Store the name being tested in *NAME and the bit |
726a989a | 201 | in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT). |
18d08014 RG |
202 | Returns true if the pattern matched, false otherwise. */ |
203 | ||
204 | static bool | |
777d77b3 | 205 | recognize_single_bit_test (gimple cond, tree *name, tree *bit, bool inv) |
18d08014 | 206 | { |
726a989a | 207 | gimple stmt; |
18d08014 RG |
208 | |
209 | /* Get at the definition of the result of the bit test. */ | |
777d77b3 | 210 | if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR) |
726a989a RB |
211 | || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME |
212 | || !integer_zerop (gimple_cond_rhs (cond))) | |
18d08014 | 213 | return false; |
726a989a RB |
214 | stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond)); |
215 | if (!is_gimple_assign (stmt)) | |
18d08014 | 216 | return false; |
18d08014 RG |
217 | |
218 | /* Look at which bit is tested. One form to recognize is | |
219 | D.1985_5 = state_3(D) >> control1_4(D); | |
220 | D.1986_6 = (int) D.1985_5; | |
221 | D.1987_7 = op0 & 1; | |
222 | if (D.1987_7 != 0) */ | |
726a989a RB |
223 | if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR |
224 | && integer_onep (gimple_assign_rhs2 (stmt)) | |
225 | && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME) | |
18d08014 | 226 | { |
726a989a | 227 | tree orig_name = gimple_assign_rhs1 (stmt); |
b0569227 RG |
228 | |
229 | /* Look through copies and conversions to eventually | |
230 | find the stmt that computes the shift. */ | |
726a989a RB |
231 | stmt = SSA_NAME_DEF_STMT (orig_name); |
232 | ||
233 | while (is_gimple_assign (stmt) | |
a6450905 RG |
234 | && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)) |
235 | && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt))) | |
236 | <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))) | |
237 | || gimple_assign_ssa_name_copy_p (stmt))) | |
238 | stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)); | |
18d08014 | 239 | |
b0569227 | 240 | /* If we found such, decompose it. */ |
726a989a RB |
241 | if (is_gimple_assign (stmt) |
242 | && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR) | |
18d08014 RG |
243 | { |
244 | /* op0 & (1 << op1) */ | |
726a989a RB |
245 | *bit = gimple_assign_rhs2 (stmt); |
246 | *name = gimple_assign_rhs1 (stmt); | |
18d08014 RG |
247 | } |
248 | else | |
249 | { | |
250 | /* t & 1 */ | |
b0569227 | 251 | *bit = integer_zero_node; |
d7b339dd | 252 | *name = get_name_for_bit_test (orig_name); |
18d08014 RG |
253 | } |
254 | ||
255 | return true; | |
256 | } | |
257 | ||
258 | /* Another form is | |
259 | D.1987_7 = op0 & (1 << CST) | |
260 | if (D.1987_7 != 0) */ | |
726a989a RB |
261 | if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR |
262 | && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | |
263 | && integer_pow2p (gimple_assign_rhs2 (stmt))) | |
18d08014 | 264 | { |
726a989a | 265 | *name = gimple_assign_rhs1 (stmt); |
18d08014 | 266 | *bit = build_int_cst (integer_type_node, |
726a989a | 267 | tree_log2 (gimple_assign_rhs2 (stmt))); |
18d08014 RG |
268 | return true; |
269 | } | |
270 | ||
271 | /* Another form is | |
272 | D.1986_6 = 1 << control1_4(D) | |
273 | D.1987_7 = op0 & D.1986_6 | |
274 | if (D.1987_7 != 0) */ | |
726a989a RB |
275 | if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR |
276 | && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | |
277 | && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME) | |
18d08014 | 278 | { |
726a989a | 279 | gimple tmp; |
18d08014 RG |
280 | |
281 | /* Both arguments of the BIT_AND_EXPR can be the single-bit | |
282 | specifying expression. */ | |
726a989a RB |
283 | tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)); |
284 | if (is_gimple_assign (tmp) | |
285 | && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR | |
286 | && integer_onep (gimple_assign_rhs1 (tmp))) | |
18d08014 | 287 | { |
726a989a RB |
288 | *name = gimple_assign_rhs2 (stmt); |
289 | *bit = gimple_assign_rhs2 (tmp); | |
18d08014 RG |
290 | return true; |
291 | } | |
292 | ||
726a989a RB |
293 | tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt)); |
294 | if (is_gimple_assign (tmp) | |
295 | && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR | |
296 | && integer_onep (gimple_assign_rhs1 (tmp))) | |
18d08014 | 297 | { |
726a989a RB |
298 | *name = gimple_assign_rhs1 (stmt); |
299 | *bit = gimple_assign_rhs2 (tmp); | |
18d08014 RG |
300 | return true; |
301 | } | |
302 | } | |
303 | ||
304 | return false; | |
305 | } | |
306 | ||
726a989a | 307 | /* Recognize a bit test pattern in a GIMPLE_COND and its defining |
18d08014 RG |
308 | statements. Store the name being tested in *NAME and the bits |
309 | in *BITS. The COND_EXPR computes *NAME & *BITS. | |
310 | Returns true if the pattern matched, false otherwise. */ | |
311 | ||
312 | static bool | |
777d77b3 | 313 | recognize_bits_test (gimple cond, tree *name, tree *bits, bool inv) |
18d08014 | 314 | { |
726a989a | 315 | gimple stmt; |
18d08014 RG |
316 | |
317 | /* Get at the definition of the result of the bit test. */ | |
777d77b3 | 318 | if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR) |
726a989a RB |
319 | || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME |
320 | || !integer_zerop (gimple_cond_rhs (cond))) | |
18d08014 | 321 | return false; |
726a989a RB |
322 | stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond)); |
323 | if (!is_gimple_assign (stmt) | |
324 | || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR) | |
18d08014 RG |
325 | return false; |
326 | ||
726a989a RB |
327 | *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt)); |
328 | *bits = gimple_assign_rhs2 (stmt); | |
18d08014 RG |
329 | |
330 | return true; | |
331 | } | |
332 | ||
/* If-convert on an and-pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   INNER_INV, OUTER_INV and RESULT_INV indicate whether the respective
   conditions (and the combined result) are inverted.
   Three shapes are recognized, tried in order: two single-bit tests
   of the same name, two multi-bit mask tests of the same name, and
   two arbitrary comparisons foldable into one.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_cond, outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  /* Both blocks must end in a GIMPLE_COND.  */
  inner_cond = last_stmt (inner_cond_bb);
  if (!inner_cond
      || gimple_code (inner_cond) != GIMPLE_COND)
    return false;

  outer_cond = last_stmt (outer_cond_bb);
  if (!outer_cond
      || gimple_code (outer_cond) != GIMPLE_COND)
    return false;

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Build the combined mask (1 << bit1) | (1 << bit2) and emit it
	 before the inner condition.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      /* Test (name & mask) == mask, inverted if requested.  */
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Make the outer condition constant so it always falls through
	 to the inner block; leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
      && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested; the matcher may
	 return name and mask in either operand position.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else if (name1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	}
      else if (bits1 == name2)
	{
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Build name & (bits1 | bits2) != 0 before the inner cond.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Make the outer condition constant; leave CFG optimization to
	 cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  Inversion
	 can fail (ERROR_MARK) e.g. when NaNs must be honored.  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond)))));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond)))));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  /* Folding failed; fall back to emitting an explicit
	     TRUTH_AND_EXPR of both conditions when non-short-circuit
	     logical ops are acceptable for the target.  */
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the conditional. */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  /* Apply the requested result inversion here so it is not
	     applied a second time below.  */
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
					  GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Make the outer condition constant; leave CFG optimization to
	 cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
577 | ||
bf4787b2 JJ |
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.
   Four shapes are tried in order: the && form, the && form with the
   outer condition negated, the || form, and the || form with the
   outer condition negated.  Returns true if one of them was
   successfully combined.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
670 | ||
18d08014 RG |
/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  /* INNER_COND_BB itself must look like an if-then-else.  */
  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}
727 | ||
728 | /* Main entry for the tree if-conversion pass. */ | |
729 | ||
be55bfe6 TS |
namespace {

/* Pass descriptor for the tree if-combine pass.  */
const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

/* Main entry of the pass: walk all condition-ending blocks of FUN and
   try to combine their conditions.  Returns TODO_cleanup_cfg when any
   combination succeeded so the now-redundant blocks get removed.  */

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      /* Only blocks ending in a condition are candidates.  */
      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace
792 | ||
/* Factory used by the pass manager to instantiate the if-combine
   pass for context CTXT.  */

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}
797 | } |