]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-forwprop.c
backport: As described in http://gcc.gnu.org/ml/gcc/2012-08/msg00015.html...
[thirdparty/gcc.git] / gcc / tree-ssa-forwprop.c
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "langhooks.h"
32 #include "flags.h"
33 #include "gimple.h"
34 #include "expr.h"
35 #include "cfgloop.h"
36
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
41
42 One class of common cases we handle is forward propagating a single use
43 variable into a COND_EXPR.
44
45 bb0:
46 x = a COND b;
47 if (x) goto ... else goto ...
48
49 Will be transformed into:
50
51 bb0:
52 if (a COND b) goto ... else goto ...
53
54 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
55
56 Or (assuming c1 and c2 are constants):
57
58 bb0:
59 x = a + c1;
60 if (x EQ/NEQ c2) goto ... else goto ...
61
62 Will be transformed into:
63
64 bb0:
65 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
66
67 Similarly for x = a - c1.
68
69 Or
70
71 bb0:
72 x = !a
73 if (x) goto ... else goto ...
74
75 Will be transformed into:
76
77 bb0:
78 if (a == 0) goto ... else goto ...
79
80 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
81 For these cases, we propagate A into all, possibly more than one,
82 COND_EXPRs that use X.
83
84 Or
85
86 bb0:
87 x = (typecast) a
88 if (x) goto ... else goto ...
89
90 Will be transformed into:
91
92 bb0:
93 if (a != 0) goto ... else goto ...
94
95 (Assuming a is an integral type and x is a boolean or x is an
96 integral and a is a boolean.)
97
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
101
102 In addition to eliminating the variable and the statement which assigns
103 a value to the variable, we may be able to later thread the jump without
104 adding insane complexity in the dominator optimizer.
105
106 Also note these transformations can cascade. We handle this by having
107 a worklist of COND_EXPR statements to examine. As we make a change to
108 a statement, we put it back on the worklist to examine on the next
109 iteration of the main loop.
110
111 A second class of propagation opportunities arises for ADDR_EXPR
112 nodes.
113
114 ptr = &x->y->z;
115 res = *ptr;
116
117 Will get turned into
118
119 res = x->y->z;
120
121 Or
122 ptr = (type1*)&type2var;
123 res = *ptr
124
125 Will get turned into (if type1 and type2 are the same size
126 and neither have volatile on them):
127 res = VIEW_CONVERT_EXPR<type1>(type2var)
128
129 Or
130
131 ptr = &x[0];
132 ptr2 = ptr + <constant>;
133
134 Will get turned into
135
136 ptr2 = &x[constant/elementsize];
137
138 Or
139
140 ptr = &x[0];
141 offset = index * element_size;
142 offset_p = (pointer) offset;
143 ptr2 = ptr + offset_p
144
145 Will get turned into:
146
147 ptr2 = &x[index];
148
149 Or
150 ssa = (int) decl
151 res = ssa & 1
152
153 Provided that decl has known alignment >= 2, will get turned into
154
155 res = 0
156
157 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
158 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
159 {NOT_EXPR,NEG_EXPR}.
160
161 This will (of course) be extended as other needs arise. */
162
163 static bool forward_propagate_addr_expr (tree name, tree rhs);
164
165 /* Set to true if we delete dead edges during the optimization. */
166 static bool cfg_changed;
167
168 static tree rhs_to_tree (tree type, gimple stmt);
169
170 /* Get the next statement we can propagate NAME's value into skipping
171 trivial copies. Returns the statement that is suitable as a
172 propagation destination or NULL_TREE if there is no such one.
173 This only returns destinations in a single-use chain. FINAL_NAME_P
174 if non-NULL is written to the ssa name that represents the use. */
175
176 static gimple
177 get_prop_dest_stmt (tree name, tree *final_name_p)
178 {
179 use_operand_p use;
180 gimple use_stmt;
181
182 do {
183 /* If name has multiple uses, bail out. */
184 if (!single_imm_use (name, &use, &use_stmt))
185 return NULL;
186
187 /* If this is not a trivial copy, we found it. */
188 if (!gimple_assign_ssa_name_copy_p (use_stmt)
189 || gimple_assign_rhs1 (use_stmt) != name)
190 break;
191
192 /* Continue searching uses of the copy destination. */
193 name = gimple_assign_lhs (use_stmt);
194 } while (1);
195
196 if (final_name_p)
197 *final_name_p = name;
198
199 return use_stmt;
200 }
201
202 /* Get the statement we can propagate from into NAME skipping
203 trivial copies. Returns the statement which defines the
204 propagation source or NULL_TREE if there is no such one.
205 If SINGLE_USE_ONLY is set considers only sources which have
206 a single use chain up to NAME. If SINGLE_USE_P is non-null,
207 it is set to whether the chain to NAME is a single use chain
208 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
209
210 static gimple
211 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
212 {
213 bool single_use = true;
214
215 do {
216 gimple def_stmt = SSA_NAME_DEF_STMT (name);
217
218 if (!has_single_use (name))
219 {
220 single_use = false;
221 if (single_use_only)
222 return NULL;
223 }
224
225 /* If name is defined by a PHI node or is the default def, bail out. */
226 if (!is_gimple_assign (def_stmt))
227 return NULL;
228
229 /* If def_stmt is not a simple copy, we possibly found it. */
230 if (!gimple_assign_ssa_name_copy_p (def_stmt))
231 {
232 tree rhs;
233
234 if (!single_use_only && single_use_p)
235 *single_use_p = single_use;
236
237 /* We can look through pointer conversions in the search
238 for a useful stmt for the comparison folding. */
239 rhs = gimple_assign_rhs1 (def_stmt);
240 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
241 && TREE_CODE (rhs) == SSA_NAME
242 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
243 && POINTER_TYPE_P (TREE_TYPE (rhs)))
244 name = rhs;
245 else
246 return def_stmt;
247 }
248 else
249 {
250 /* Continue searching the def of the copy source name. */
251 name = gimple_assign_rhs1 (def_stmt);
252 }
253 } while (1);
254 }
255
256 /* Checks if the destination ssa name in DEF_STMT can be used as
257 propagation source. Returns true if so, otherwise false. */
258
259 static bool
260 can_propagate_from (gimple def_stmt)
261 {
262 gcc_assert (is_gimple_assign (def_stmt));
263
264 /* If the rhs has side-effects we cannot propagate from it. */
265 if (gimple_has_volatile_ops (def_stmt))
266 return false;
267
268 /* If the rhs is a load we cannot propagate from it. */
269 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
270 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
271 return false;
272
273 /* Constants can be always propagated. */
274 if (gimple_assign_single_p (def_stmt)
275 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
276 return true;
277
278 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
279 if (stmt_references_abnormal_ssa_name (def_stmt))
280 return false;
281
282 /* If the definition is a conversion of a pointer to a function type,
283 then we can not apply optimizations as some targets require
284 function pointers to be canonicalized and in this case this
285 optimization could eliminate a necessary canonicalization. */
286 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
287 {
288 tree rhs = gimple_assign_rhs1 (def_stmt);
289 if (POINTER_TYPE_P (TREE_TYPE (rhs))
290 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
291 return false;
292 }
293
294 return true;
295 }
296
297 /* Remove a chain of dead statements starting at the definition of
298 NAME. The chain is linked via the first operand of the defining statements.
299 If NAME was replaced in its only use then this function can be used
300 to clean up dead stmts. The function handles already released SSA
301 names gracefully.
302 Returns true if cleanup-cfg has to run. */
303
304 static bool
305 remove_prop_source_from_use (tree name)
306 {
307 gimple_stmt_iterator gsi;
308 gimple stmt;
309 bool cfg_changed = false;
310
311 do {
312 basic_block bb;
313
314 if (SSA_NAME_IN_FREE_LIST (name)
315 || SSA_NAME_IS_DEFAULT_DEF (name)
316 || !has_zero_uses (name))
317 return cfg_changed;
318
319 stmt = SSA_NAME_DEF_STMT (name);
320 if (gimple_code (stmt) == GIMPLE_PHI
321 || gimple_has_side_effects (stmt))
322 return cfg_changed;
323
324 bb = gimple_bb (stmt);
325 gsi = gsi_for_stmt (stmt);
326 unlink_stmt_vdef (stmt);
327 if (gsi_remove (&gsi, true))
328 cfg_changed |= gimple_purge_dead_eh_edges (bb);
329 release_defs (stmt);
330
331 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
332 } while (name && TREE_CODE (name) == SSA_NAME);
333
334 return cfg_changed;
335 }
336
337 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
338 converted to type TYPE.
339
340 This should disappear, but is needed so we can combine expressions and use
341 the fold() interfaces. Long term, we need to develop folding and combine
342 routines that deal with gimple exclusively . */
343
344 static tree
345 rhs_to_tree (tree type, gimple stmt)
346 {
347 location_t loc = gimple_location (stmt);
348 enum tree_code code = gimple_assign_rhs_code (stmt);
349 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
350 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
351 gimple_assign_rhs2 (stmt),
352 gimple_assign_rhs3 (stmt));
353 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
354 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
355 gimple_assign_rhs2 (stmt));
356 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
357 return build1 (code, type, gimple_assign_rhs1 (stmt));
358 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
359 return gimple_assign_rhs1 (stmt);
360 else
361 gcc_unreachable ();
362 }
363
364 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
365 the folded result in a form suitable for COND_EXPR_COND or
366 NULL_TREE, if there is no suitable simplified form. If
367 INVARIANT_ONLY is true only gimple_min_invariant results are
368 considered simplified. */
369
370 static tree
371 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
372 tree op0, tree op1, bool invariant_only)
373 {
374 tree t;
375
376 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
377
/* Defer overflow warnings while folding: warnings are only issued (or
   dropped) below once we know whether the folded form is used.  */
378 fold_defer_overflow_warnings ();
379 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
380 if (!t)
381 {
/* Nothing folded -- discard any deferred warnings.  */
382 fold_undefer_overflow_warnings (false, NULL, 0);
383 return NULL_TREE;
384 }
385
386 /* Require that we got a boolean type out if we put one in. */
387 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
388
389 /* Canonicalize the combined condition for use in a COND_EXPR. */
390 t = canonicalize_cond_expr_cond (t);
391
392 /* Bail out if we required an invariant but didn't get one. */
393 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
394 {
395 fold_undefer_overflow_warnings (false, NULL, 0);
396 return NULL_TREE;
397 }
398
/* Success: emit any deferred warnings unless STMT suppresses them.  */
399 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
400
401 return t;
402 }
403
404 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
405 of its operand. Return a new comparison tree or NULL_TREE if there
406 were no simplifying combines. */
407
408 static tree
409 forward_propagate_into_comparison_1 (gimple stmt,
410 enum tree_code code, tree type,
411 tree op0, tree op1)
412 {
413 tree tmp = NULL_TREE;
414 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
415 bool single_use0_p = false, single_use1_p = false;
416
417 /* For comparisons use the first operand, that is likely to
418 simplify comparisons against constants. */
419 if (TREE_CODE (op0) == SSA_NAME)
420 {
421 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
422 if (def_stmt && can_propagate_from (def_stmt))
423 {
424 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
/* Unless the def has a single use, require an invariant result so
   the (non-invariant) defining expression is not duplicated.  */
425 tmp = combine_cond_expr_cond (stmt, code, type,
426 rhs0, op1, !single_use0_p);
427 if (tmp)
428 return tmp;
429 }
430 }
431
432 /* If that wasn't successful, try the second operand. */
433 if (TREE_CODE (op1) == SSA_NAME)
434 {
435 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
436 if (def_stmt && can_propagate_from (def_stmt))
437 {
438 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
439 tmp = combine_cond_expr_cond (stmt, code, type,
440 op0, rhs1, !single_use1_p);
441 if (tmp)
442 return tmp;
443 }
444 }
445
446 /* If that wasn't successful either, try both operands. */
447 if (rhs0 != NULL_TREE
448 && rhs1 != NULL_TREE)
449 tmp = combine_cond_expr_cond (stmt, code, type,
450 rhs0, rhs1,
451 !(single_use0_p && single_use1_p));
452
453 return tmp;
454 }
455
456 /* Propagate from the ssa name definition statements of the assignment
457 from a comparison at *GSI into the conditional if that simplifies it.
458 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
459 otherwise returns 0. */
460
461 static int
462 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
463 {
464 gimple stmt = gsi_stmt (*gsi);
465 tree tmp;
466 bool cfg_changed = false;
467 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
468 tree rhs1 = gimple_assign_rhs1 (stmt);
469 tree rhs2 = gimple_assign_rhs2 (stmt);
470
471 /* Combine the comparison with defining statements. */
472 tmp = forward_propagate_into_comparison_1 (stmt,
473 gimple_assign_rhs_code (stmt),
474 type, rhs1, rhs2);
475 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
476 {
477 gimple_assign_set_rhs_from_tree (gsi, tmp);
478 fold_stmt (gsi);
479 update_stmt (gsi_stmt (*gsi));
480
481 if (TREE_CODE (rhs1) == SSA_NAME)
482 cfg_changed |= remove_prop_source_from_use (rhs1);
483 if (TREE_CODE (rhs2) == SSA_NAME)
484 cfg_changed |= remove_prop_source_from_use (rhs2);
485 return cfg_changed ? 2 : 1;
486 }
487
488 return 0;
489 }
490
491 /* Propagate from the ssa name definition statements of COND_EXPR
492 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
493 Returns zero if no statement was changed, one if there were
494 changes and two if cfg_cleanup needs to run.
495
496 This must be kept in sync with forward_propagate_into_cond. */
497
498 static int
499 forward_propagate_into_gimple_cond (gimple stmt)
500 {
501 tree tmp;
502 enum tree_code code = gimple_cond_code (stmt);
503 bool cfg_changed = false;
504 tree rhs1 = gimple_cond_lhs (stmt);
505 tree rhs2 = gimple_cond_rhs (stmt);
506
507 /* We can do tree combining on SSA_NAME and comparison expressions. */
508 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
509 return 0;
510
511 tmp = forward_propagate_into_comparison_1 (stmt, code,
512 boolean_type_node,
513 rhs1, rhs2);
514 if (tmp)
515 {
516 if (dump_file && tmp)
517 {
518 fprintf (dump_file, " Replaced '");
519 print_gimple_expr (dump_file, stmt, 0, 0);
520 fprintf (dump_file, "' with '");
521 print_generic_expr (dump_file, tmp, 0);
522 fprintf (dump_file, "'\n");
523 }
524
525 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
526 update_stmt (stmt);
527
528 if (TREE_CODE (rhs1) == SSA_NAME)
529 cfg_changed |= remove_prop_source_from_use (rhs1);
530 if (TREE_CODE (rhs2) == SSA_NAME)
531 cfg_changed |= remove_prop_source_from_use (rhs2);
532 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
533 }
534
535 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
536 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
537 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
538 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
539 && ((code == EQ_EXPR
540 && integer_zerop (rhs2))
541 || (code == NE_EXPR
542 && integer_onep (rhs2))))
543 {
544 basic_block bb = gimple_bb (stmt);
545 gimple_cond_set_code (stmt, NE_EXPR);
546 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
547 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
548 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
549 return 1;
550 }
551
552 return 0;
553 }
554
555
556 /* Propagate from the ssa name definition statements of COND_EXPR
557 in the rhs of statement STMT into the conditional if that simplifies it.
558 Returns true zero if the stmt was changed. */
559
560 static bool
561 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
562 {
563 gimple stmt = gsi_stmt (*gsi_p);
564 tree tmp = NULL_TREE;
565 tree cond = gimple_assign_rhs1 (stmt);
566 bool swap = false;
567
568 /* We can do tree combining on SSA_NAME and comparison expressions. */
569 if (COMPARISON_CLASS_P (cond))
570 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
571 boolean_type_node,
572 TREE_OPERAND (cond, 0),
573 TREE_OPERAND (cond, 1));
574 else if (TREE_CODE (cond) == SSA_NAME)
575 {
576 enum tree_code code;
577 tree name = cond;
578 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
579 if (!def_stmt || !can_propagate_from (def_stmt))
580 return 0;
581
582 code = gimple_assign_rhs_code (def_stmt);
583 if (TREE_CODE_CLASS (code) == tcc_comparison)
584 tmp = fold_build2_loc (gimple_location (def_stmt),
585 code,
586 boolean_type_node,
587 gimple_assign_rhs1 (def_stmt),
588 gimple_assign_rhs2 (def_stmt));
589 else if ((code == BIT_NOT_EXPR
590 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
591 || (code == BIT_XOR_EXPR
592 && integer_onep (gimple_assign_rhs2 (def_stmt))))
593 {
594 tmp = gimple_assign_rhs1 (def_stmt);
595 swap = true;
596 }
597 }
598
599 if (tmp
600 && is_gimple_condexpr (tmp))
601 {
602 if (dump_file && tmp)
603 {
604 fprintf (dump_file, " Replaced '");
605 print_generic_expr (dump_file, cond, 0);
606 fprintf (dump_file, "' with '");
607 print_generic_expr (dump_file, tmp, 0);
608 fprintf (dump_file, "'\n");
609 }
610
611 if (integer_onep (tmp))
612 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
613 else if (integer_zerop (tmp))
614 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
615 else
616 {
617 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
618 if (swap)
619 {
620 tree t = gimple_assign_rhs2 (stmt);
621 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
622 gimple_assign_set_rhs3 (stmt, t);
623 }
624 }
625 stmt = gsi_stmt (*gsi_p);
626 update_stmt (stmt);
627
628 return true;
629 }
630
631 return 0;
632 }
633
634 /* Propagate from the ssa name definition statements of COND_EXPR
635 values in the rhs of statement STMT into the conditional arms
636 if that simplifies it.
637 Returns true if the stmt was changed. */
638
639 static bool
640 combine_cond_exprs (gimple_stmt_iterator *gsi_p)
641 {
642 gimple stmt = gsi_stmt (*gsi_p);
643 tree cond, val1, val2;
644 bool changed = false;
645
646 cond = gimple_assign_rhs1 (stmt);
647 val1 = gimple_assign_rhs2 (stmt);
648 if (TREE_CODE (val1) == SSA_NAME)
649 {
650 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
/* If the true arm is itself a COND_EXPR on the same condition,
   take its true value:  x = c ? (c ? a : b) : d  -->  x = c ? a : d.  */
651 if (is_gimple_assign (def_stmt)
652 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
653 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
654 {
655 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
656 gimple_assign_set_rhs2 (stmt, val1);
657 changed = true;
658 }
659 }
660 val2 = gimple_assign_rhs3 (stmt);
661 if (TREE_CODE (val2) == SSA_NAME)
662 {
663 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
/* Likewise for the false arm: take its false value.  */
664 if (is_gimple_assign (def_stmt)
665 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
666 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
667 {
668 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
669 gimple_assign_set_rhs3 (stmt, val2);
670 changed = true;
671 }
672 }
/* If both arms are now equal the COND_EXPR collapses to that value.  */
673 if (operand_equal_p (val1, val2, 0))
674 {
675 gimple_assign_set_rhs_from_tree (gsi_p, val1);
676 stmt = gsi_stmt (*gsi_p);
677 changed = true;
678 }
679
680 if (changed)
681 update_stmt (stmt);
682
683 return changed;
684 }
685
686 /* We've just substituted an ADDR_EXPR into stmt. Update all the
687 relevant data structures to match. */
688
689 static void
690 tidy_after_forward_propagate_addr (gimple stmt)
691 {
692 /* We may have turned a trapping insn into a non-trapping insn. */
693 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
694 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
695 cfg_changed = true;
696
697 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
698 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
699 }
700
701 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
702 ADDR_EXPR <whatever>.
703
704 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
705 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
706 node or for recovery of array indexing from pointer arithmetic.
707
708 Return true if the propagation was successful (the propagation can
709 be not totally successful, yet things may have been changed). */
710
711 static bool
712 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
713 gimple_stmt_iterator *use_stmt_gsi,
714 bool single_use_p)
715 {
716 tree lhs, rhs, rhs2, array_ref;
717 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
718 enum tree_code rhs_code;
/* RES stays true unless the LHS walk finds a second dereference of
   NAME it cannot replace (struct-copy case below).  */
719 bool res = true;
720
721 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
722
723 lhs = gimple_assign_lhs (use_stmt);
724 rhs_code = gimple_assign_rhs_code (use_stmt);
725 rhs = gimple_assign_rhs1 (use_stmt);
726
727 /* Trivial cases. The use statement could be a trivial copy or a
728 useless conversion. Recurse to the uses of the lhs as copyprop does
729 not copy through different variant pointers and FRE does not catch
730 all useless conversions. Treat the case of a single-use name and
731 a conversion to def_rhs type separate, though. */
732 if (TREE_CODE (lhs) == SSA_NAME
733 && ((rhs_code == SSA_NAME && rhs == name)
734 || CONVERT_EXPR_CODE_P (rhs_code)))
735 {
736 /* Only recurse if we don't deal with a single use or we cannot
737 do the propagation to the current statement. In particular
738 we can end up with a conversion needed for a non-invariant
739 address which we cannot do in a single statement. */
740 if (!single_use_p
741 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
742 && (!is_gimple_min_invariant (def_rhs)
743 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
744 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
745 && (TYPE_PRECISION (TREE_TYPE (lhs))
746 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
747 return forward_propagate_addr_expr (lhs, def_rhs);
748
749 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
750 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
751 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
752 else
753 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
754 return true;
755 }
756
757 /* Propagate through constant pointer adjustments. */
758 if (TREE_CODE (lhs) == SSA_NAME
759 && rhs_code == POINTER_PLUS_EXPR
760 && rhs == name
761 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
762 {
763 tree new_def_rhs;
764 /* As we come here with non-invariant addresses in def_rhs we need
765 to make sure we can build a valid constant offsetted address
766 for further propagation. Simply rely on fold building that
767 and check after the fact. */
768 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
769 def_rhs,
770 fold_convert (ptr_type_node,
771 gimple_assign_rhs2 (use_stmt)));
772 if (TREE_CODE (new_def_rhs) == MEM_REF
773 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
774 return false;
775 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
776 TREE_TYPE (rhs));
777
778 /* Recurse. If we could propagate into all uses of lhs do not
779 bother to replace into the current use but just pretend we did. */
780 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
781 && forward_propagate_addr_expr (lhs, new_def_rhs))
782 return true;
783
784 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
785 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
786 new_def_rhs, NULL_TREE);
787 else if (is_gimple_min_invariant (new_def_rhs))
788 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
789 new_def_rhs, NULL_TREE);
790 else
791 return false;
792 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
793 update_stmt (use_stmt);
794 return true;
795 }
796
797 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
798 ADDR_EXPR will not appear on the LHS. */
799 lhs = gimple_assign_lhs (use_stmt);
800 while (handled_component_p (lhs))
801 lhs = TREE_OPERAND (lhs, 0);
802
803 /* Now see if the LHS node is a MEM_REF using NAME. If so,
804 propagate the ADDR_EXPR into the use of NAME and fold the result. */
805 if (TREE_CODE (lhs) == MEM_REF
806 && TREE_OPERAND (lhs, 0) == name)
807 {
808 tree def_rhs_base;
809 HOST_WIDE_INT def_rhs_offset;
810 /* If the address is invariant we can always fold it. */
811 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
812 &def_rhs_offset)))
813 {
814 double_int off = mem_ref_offset (lhs);
815 tree new_ptr;
816 off = double_int_add (off,
817 shwi_to_double_int (def_rhs_offset));
818 if (TREE_CODE (def_rhs_base) == MEM_REF)
819 {
820 off = double_int_add (off, mem_ref_offset (def_rhs_base));
821 new_ptr = TREE_OPERAND (def_rhs_base, 0);
822 }
823 else
824 new_ptr = build_fold_addr_expr (def_rhs_base);
825 TREE_OPERAND (lhs, 0) = new_ptr;
826 TREE_OPERAND (lhs, 1)
827 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
828 tidy_after_forward_propagate_addr (use_stmt);
829 /* Continue propagating into the RHS if this was not the only use. */
830 if (single_use_p)
831 return true;
832 }
833 /* If the LHS is a plain dereference and the value type is the same as
834 that of the pointed-to type of the address we can put the
835 dereferenced address on the LHS preserving the original alias-type. */
836 else if (gimple_assign_lhs (use_stmt) == lhs
837 && integer_zerop (TREE_OPERAND (lhs, 1))
838 && useless_type_conversion_p
839 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
840 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
841 {
842 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
843 tree new_offset, new_base, saved, new_lhs;
844 while (handled_component_p (*def_rhs_basep))
845 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* DEF_RHS is rewritten in place below and restored from SAVED after
   unsharing; only the unshared copy keeps the new MEM_REF base.  */
846 saved = *def_rhs_basep;
847 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
848 {
849 new_base = TREE_OPERAND (*def_rhs_basep, 0);
850 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
851 TREE_OPERAND (*def_rhs_basep, 1));
852 }
853 else
854 {
855 new_base = build_fold_addr_expr (*def_rhs_basep);
856 new_offset = TREE_OPERAND (lhs, 1);
857 }
858 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
859 new_base, new_offset);
860 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
861 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
862 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
863 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
864 gimple_assign_set_lhs (use_stmt, new_lhs);
865 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
866 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
867 *def_rhs_basep = saved;
868 tidy_after_forward_propagate_addr (use_stmt);
869 /* Continue propagating into the RHS if this was not the
870 only use. */
871 if (single_use_p)
872 return true;
873 }
874 else
875 /* We can have a struct assignment dereferencing our name twice.
876 Note that we didn't propagate into the lhs to not falsely
877 claim we did when propagating into the rhs. */
878 res = false;
879 }
880
881 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
882 nodes from the RHS. */
883 rhs = gimple_assign_rhs1 (use_stmt);
884 if (TREE_CODE (rhs) == ADDR_EXPR)
885 rhs = TREE_OPERAND (rhs, 0);
886 while (handled_component_p (rhs))
887 rhs = TREE_OPERAND (rhs, 0);
888
889 /* Now see if the RHS node is a MEM_REF using NAME. If so,
890 propagate the ADDR_EXPR into the use of NAME and fold the result. */
891 if (TREE_CODE (rhs) == MEM_REF
892 && TREE_OPERAND (rhs, 0) == name)
893 {
894 tree def_rhs_base;
895 HOST_WIDE_INT def_rhs_offset;
896 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
897 &def_rhs_offset)))
898 {
899 double_int off = mem_ref_offset (rhs);
900 tree new_ptr;
901 off = double_int_add (off,
902 shwi_to_double_int (def_rhs_offset));
903 if (TREE_CODE (def_rhs_base) == MEM_REF)
904 {
905 off = double_int_add (off, mem_ref_offset (def_rhs_base));
906 new_ptr = TREE_OPERAND (def_rhs_base, 0);
907 }
908 else
909 new_ptr = build_fold_addr_expr (def_rhs_base);
910 TREE_OPERAND (rhs, 0) = new_ptr;
911 TREE_OPERAND (rhs, 1)
912 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
913 fold_stmt_inplace (use_stmt_gsi);
914 tidy_after_forward_propagate_addr (use_stmt);
915 return res;
916 }
917 /* If the RHS is a plain dereference and the value type is the same as
918 that of the pointed-to type of the address we can put the
919 dereferenced address on the RHS preserving the original alias-type. */
920 else if (gimple_assign_rhs1 (use_stmt) == rhs
921 && integer_zerop (TREE_OPERAND (rhs, 1))
922 && useless_type_conversion_p
923 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
924 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
925 {
926 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
927 tree new_offset, new_base, saved, new_rhs;
928 while (handled_component_p (*def_rhs_basep))
929 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Same in-place rewrite/restore dance as in the LHS case above.  */
930 saved = *def_rhs_basep;
931 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
932 {
933 new_base = TREE_OPERAND (*def_rhs_basep, 0);
934 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
935 TREE_OPERAND (*def_rhs_basep, 1));
936 }
937 else
938 {
939 new_base = build_fold_addr_expr (*def_rhs_basep);
940 new_offset = TREE_OPERAND (rhs, 1);
941 }
942 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
943 new_base, new_offset);
944 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
945 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
946 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
947 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
948 gimple_assign_set_rhs1 (use_stmt, new_rhs);
949 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
950 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
951 *def_rhs_basep = saved;
952 fold_stmt_inplace (use_stmt_gsi);
953 tidy_after_forward_propagate_addr (use_stmt);
954 return res;
955 }
956 }
957
958 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
959 is nothing to do. */
960 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
961 || gimple_assign_rhs1 (use_stmt) != name)
962 return false;
963
964 /* The remaining cases are all for turning pointer arithmetic into
965 array indexing. They only apply when we have the address of
966 element zero in an array. If that is not the case then there
967 is nothing to do. */
968 array_ref = TREE_OPERAND (def_rhs, 0);
969 if ((TREE_CODE (array_ref) != ARRAY_REF
970 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
971 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
972 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
973 return false;
974
975 rhs2 = gimple_assign_rhs2 (use_stmt);
976 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
977 if (TREE_CODE (rhs2) == INTEGER_CST)
978 {
979 tree new_rhs = build1_loc (gimple_location (use_stmt),
980 ADDR_EXPR, TREE_TYPE (def_rhs),
981 fold_build2 (MEM_REF,
982 TREE_TYPE (TREE_TYPE (def_rhs)),
983 unshare_expr (def_rhs),
984 fold_convert (ptr_type_node,
985 rhs2)));
986 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
987 use_stmt = gsi_stmt (*use_stmt_gsi);
988 update_stmt (use_stmt);
989 tidy_after_forward_propagate_addr (use_stmt);
990 return true;
991 }
992
993 return false;
994 }
995
996 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
997
998 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
999 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1000 node or for recovery of array indexing from pointer arithmetic.
1001 Returns true, if all uses have been propagated into. */
1002
static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  /* Loop depth of the defining statement; used below to avoid pushing
     non-invariant address computations into deeper loops.  */
  int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
  imm_use_iterator iter;
  gimple use_stmt;
  /* Becomes false if any use could not be propagated into.  */
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  /* Debug statements are not real uses: failing to propagate
	     into them does not keep NAME alive.  */
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate non-invariant ADDR_EXPRs into the loop as that
	 is likely adding expression evaluations into the loop.  */
      if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
	  && !is_gimple_min_invariant (rhs))
	{
	  all = false;
	  continue;
	}

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* If the use has moved to a different statement adjust
	   the update machinery for the old statement too.  */
	if (use_stmt != gsi_stmt (gsi))
	  {
	    update_stmt (use_stmt);
	    use_stmt = gsi_stmt (gsi);
	  }

	update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  /* Only report full success when every use was propagated into and
     NAME itself is now dead.  */
  return all && has_zero_uses (name);
}
1067
1068
1069 /* Forward propagate the comparison defined in *DEFGSI like
1070 cond_1 = x CMP y to uses of the form
1071 a_1 = (T')cond_1
1072 a_1 = !cond_1
1073 a_1 = cond_1 != 0
1074 Returns true if stmt is now unused. Advance DEFGSI to the next
1075 statement. */
1076
static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.
     NAME is updated to the copy's destination as we follow them.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.
     That is a 1-bit BIT_NOT_EXPR or an XOR with constant 1.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
	  && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      /* Inverting e.g. LT_EXPR is invalid for NaN-honoring floats;
	 invert_tree_comparison reports that with ERROR_MARK.  */
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
	goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
		    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
1151
1152
1153 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1154 If so, we can change STMT into lhs = y which can later be copy
1155 propagated. Similarly for negation.
1156
1157 This could trivially be formulated as a forward propagation
1158 to immediate uses. However, we already had an implementation
1159 from DOM which used backward propagation via the use-def links.
1160
1161 It turns out that backward propagation is actually faster as
1162 there's less work to do for each NOT/NEG expression we find.
1163 Backwards propagation needs to look at the statement in a single
1164 backlink. Forward propagation needs to look at potentially more
1165 than one forward link.
1166
1167 Returns true when the statement was changed. */
1168
1169 static bool
1170 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1171 {
1172 gimple stmt = gsi_stmt (*gsi_p);
1173 tree rhs = gimple_assign_rhs1 (stmt);
1174 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1175
1176 /* See if the RHS_DEF_STMT has the same form as our statement. */
1177 if (is_gimple_assign (rhs_def_stmt)
1178 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1179 {
1180 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1181
1182 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1183 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1184 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1185 {
1186 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1187 stmt = gsi_stmt (*gsi_p);
1188 update_stmt (stmt);
1189 return true;
1190 }
1191 }
1192
1193 return false;
1194 }
1195
1196 /* Helper function for simplify_gimple_switch. Remove case labels that
1197 have values outside the range of the new type. */
1198
static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  VEC(tree, heap) *labels = VEC_alloc (tree, heap, branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  Label 0 is the default
     label and is deliberately skipped here.  */
  for (i = 1 ; i < branch_num; i++)
    VEC_quick_push (tree, labels, gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = VEC_length (tree, labels);
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  VEC_quick_push (tree, labels, elt);
	  len = 1;
	}

      /* Store the surviving labels back (slot 0 stays the default),
	 clear the now-stale trailing slots, and shrink the count.  */
      for (i = 0; i < VEC_length (tree, labels); i++)
	gimple_switch_set_label (stmt, i + 1, VEC_index (tree, labels, i));
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  First record every block
	 still reachable from a remaining label...  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      /* ...then remove every successor edge not in that set.  Note
	 ei_next is only advanced when no edge was removed.  */
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }

  VEC_free (tree, heap, labels);
}
1267
1268 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1269 the condition which we may be able to optimize better. */
1270
1271 static bool
1272 simplify_gimple_switch (gimple stmt)
1273 {
1274 tree cond = gimple_switch_index (stmt);
1275 tree def, to, ti;
1276 gimple def_stmt;
1277
1278 /* The optimization that we really care about is removing unnecessary
1279 casts. That will let us do much better in propagating the inferred
1280 constant at the switch target. */
1281 if (TREE_CODE (cond) == SSA_NAME)
1282 {
1283 def_stmt = SSA_NAME_DEF_STMT (cond);
1284 if (is_gimple_assign (def_stmt))
1285 {
1286 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1287 {
1288 int need_precision;
1289 bool fail;
1290
1291 def = gimple_assign_rhs1 (def_stmt);
1292
1293 to = TREE_TYPE (cond);
1294 ti = TREE_TYPE (def);
1295
1296 /* If we have an extension that preserves value, then we
1297 can copy the source value into the switch. */
1298
1299 need_precision = TYPE_PRECISION (ti);
1300 fail = false;
1301 if (! INTEGRAL_TYPE_P (ti))
1302 fail = true;
1303 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1304 fail = true;
1305 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1306 need_precision += 1;
1307 if (TYPE_PRECISION (to) < need_precision)
1308 fail = true;
1309
1310 if (!fail)
1311 {
1312 gimple_switch_set_index (stmt, def);
1313 simplify_gimple_switch_label_vec (stmt, ti);
1314 update_stmt (stmt);
1315 return true;
1316 }
1317 }
1318 }
1319 }
1320
1321 return false;
1322 }
1323
1324 /* For pointers p2 and p1 return p2 - p1 if the
1325 difference is known and constant, otherwise return NULL. */
1326
static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  /* For each pointer we record up to CPD_ITERATIONS (base, offset)
     pairs reachable by stripping ADDR_EXPRs, copies, conversions and
     constant POINTER_PLUS_EXPRs; a base common to both pointers yields
     the constant difference.  Index 0 tracks P2, index 1 tracks P1.  */
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      /* Fold component/array refs with constant offsets into
		 the accumulated byte offset.  */
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  /* &MEM[ssa + c]: continue chasing from the SSA name
		     after accounting for the MEM_REF offset.  */
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    double_int_to_tree (sizetype,
							mem_ref_offset (q)));
		}
	      else
		{
		  /* A declaration or other terminal base; record it.  */
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  /* Record the SSA name itself as a candidate base before
	     looking through its definition.  */
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      /* Only constant steps keep the difference constant.  */
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || code == NOP_EXPR)
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  /* A shared base means P2 = base + offs[0][i] and P1 = base + offs[1][j],
     so the difference is the offset difference.  */
  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
1411
1412 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1413 Optimize
1414 memcpy (p, "abcd", 4);
1415 memset (p + 4, ' ', 3);
1416 into
1417 memcpy (p, "abcd ", 7);
1418 call if the latter can be stored by pieces during expansion. */
1419
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  /* Without a virtual use there is no preceding store to combine with.  */
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      /* Only plain three-argument memset with unused result, on targets
	 with 8-bit bytes (the string buffer below assumes that).  */
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  /* The memset value and length must be compile-time constants.  */
	  if (!host_integerp (val2, 0)
	      || !host_integerp (len2, 1))
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!host_integerp (len1, 1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      /* The copied range must lie fully inside the string
		 literal and the literal must be a plain char array.  */
	      if (!host_integerp (off1, 1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_low_cst (off1, 1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !host_integerp (src1, 0))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      /* Try the mem{,p}cpy result as the base pointer instead;
		 for mempcpy that result points LEN1 bytes past PTR1.  */
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out.  */
	  if (diff == NULL
	      || !host_integerp (diff, 1)
	      || tree_int_cst_lt (len1, diff))
	    break;

	  /* Use maximum of difference plus memset length and memcpy length
	     as the new memcpy length, if it is too big, bail out.  */
	  src_len = tree_low_cst (diff, 1);
	  src_len += tree_low_cst (len2, 1);
	  if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
	    src_len = tree_low_cst (len1, 1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
		    tree_low_cst (len1, 1));
	  else
	    src_buf[0] = tree_low_cst (src1, 0);
	  memset (src_buf + tree_low_cst (diff, 1),
		  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		release_ssa_name (lhs1);
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      /* STMT2 still exists (now as memcpy), so return false to
		 let the caller re-process it.  */
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}
1620
1621 /* Checks if expression has type of one-bit precision, or is a known
1622 truth-valued expression. */
1623 static bool
1624 truth_valued_ssa_name (tree name)
1625 {
1626 gimple def;
1627 tree type = TREE_TYPE (name);
1628
1629 if (!INTEGRAL_TYPE_P (type))
1630 return false;
1631 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1632 necessarily one and so ~X is not equal to !X. */
1633 if (TYPE_PRECISION (type) == 1)
1634 return true;
1635 def = SSA_NAME_DEF_STMT (name);
1636 if (is_gimple_assign (def))
1637 return truth_value_p (gimple_assign_rhs_code (def));
1638 return false;
1639 }
1640
/* Helper routine for simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it satisfies the
   condition NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
1647 static tree
1648 lookup_logical_inverted_value (tree name)
1649 {
1650 tree op1, op2;
1651 enum tree_code code;
1652 gimple def;
1653
1654 /* If name has none-intergal type, or isn't a SSA_NAME, then
1655 return. */
1656 if (TREE_CODE (name) != SSA_NAME
1657 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1658 return NULL_TREE;
1659 def = SSA_NAME_DEF_STMT (name);
1660 if (!is_gimple_assign (def))
1661 return NULL_TREE;
1662
1663 code = gimple_assign_rhs_code (def);
1664 op1 = gimple_assign_rhs1 (def);
1665 op2 = NULL_TREE;
1666
1667 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1668 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1669 if (code == EQ_EXPR || code == NE_EXPR
1670 || code == BIT_XOR_EXPR)
1671 op2 = gimple_assign_rhs2 (def);
1672
1673 switch (code)
1674 {
1675 case BIT_NOT_EXPR:
1676 if (truth_valued_ssa_name (name))
1677 return op1;
1678 break;
1679 case EQ_EXPR:
1680 /* Check if we have X == 0 and X has an integral type. */
1681 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1682 break;
1683 if (integer_zerop (op2))
1684 return op1;
1685 break;
1686 case NE_EXPR:
1687 /* Check if we have X != 1 and X is a truth-valued. */
1688 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1689 break;
1690 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1691 return op1;
1692 break;
1693 case BIT_XOR_EXPR:
1694 /* Check if we have X ^ 1 and X is truth valued. */
1695 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1696 return op1;
1697 break;
1698 default:
1699 break;
1700 }
1701
1702 return NULL_TREE;
1703 }
1704
1705 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1706 operations CODE, if one operand has the logically inverted
1707 value of the other. */
1708 static tree
1709 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1710 tree arg1, tree arg2)
1711 {
1712 tree anot;
1713
1714 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1715 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1716 && code != BIT_XOR_EXPR)
1717 return NULL_TREE;
1718
1719 /* First check if operands ARG1 and ARG2 are equal. If so
1720 return NULL_TREE as this optimization is handled fold_stmt. */
1721 if (arg1 == arg2)
1722 return NULL_TREE;
1723 /* See if we have in arguments logical-not patterns. */
1724 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1725 || anot != arg2)
1726 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1727 || anot != arg1))
1728 return NULL_TREE;
1729
1730 /* X & !X -> 0. */
1731 if (code == BIT_AND_EXPR)
1732 return fold_convert (type, integer_zero_node);
1733 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1734 if (truth_valued_ssa_name (anot))
1735 return fold_convert (type, integer_one_node);
1736
1737 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1738 return NULL_TREE;
1739 }
1740
1741 /* Given a ssa_name in NAME see if it was defined by an assignment and
1742 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1743 to the second operand on the rhs. */
1744
1745 static inline void
1746 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1747 {
1748 gimple def;
1749 enum tree_code code1;
1750 tree arg11;
1751 tree arg21;
1752 tree arg31;
1753 enum gimple_rhs_class grhs_class;
1754
1755 code1 = TREE_CODE (name);
1756 arg11 = name;
1757 arg21 = NULL_TREE;
1758 grhs_class = get_gimple_rhs_class (code1);
1759
1760 if (code1 == SSA_NAME)
1761 {
1762 def = SSA_NAME_DEF_STMT (name);
1763
1764 if (def && is_gimple_assign (def)
1765 && can_propagate_from (def))
1766 {
1767 code1 = gimple_assign_rhs_code (def);
1768 arg11 = gimple_assign_rhs1 (def);
1769 arg21 = gimple_assign_rhs2 (def);
1770 arg31 = gimple_assign_rhs2 (def);
1771 }
1772 }
1773 else if (grhs_class == GIMPLE_TERNARY_RHS
1774 || GIMPLE_BINARY_RHS
1775 || GIMPLE_UNARY_RHS
1776 || GIMPLE_SINGLE_RHS)
1777 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1778
1779 *code = code1;
1780 *arg1 = arg11;
1781 if (arg2)
1782 *arg2 = arg21;
1783 /* Ignore arg3 currently. */
1784 }
1785
1786 /* Simplify bitwise binary operations.
1787 Return true if a transformation applied, otherwise return false. */
1788
1789 static bool
1790 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1791 {
1792 gimple stmt = gsi_stmt (*gsi);
1793 tree arg1 = gimple_assign_rhs1 (stmt);
1794 tree arg2 = gimple_assign_rhs2 (stmt);
1795 enum tree_code code = gimple_assign_rhs_code (stmt);
1796 tree res;
1797 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1798 enum tree_code def1_code, def2_code;
1799
1800 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1801 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1802
1803 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1804 if (TREE_CODE (arg2) == INTEGER_CST
1805 && CONVERT_EXPR_CODE_P (def1_code)
1806 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1807 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1808 {
1809 gimple newop;
1810 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1811 newop =
1812 gimple_build_assign_with_ops (code, tem, def1_arg1,
1813 fold_convert_loc (gimple_location (stmt),
1814 TREE_TYPE (def1_arg1),
1815 arg2));
1816 gimple_set_location (newop, gimple_location (stmt));
1817 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1818 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1819 tem, NULL_TREE, NULL_TREE);
1820 update_stmt (gsi_stmt (*gsi));
1821 return true;
1822 }
1823
1824 /* For bitwise binary operations apply operand conversions to the
1825 binary operation result instead of to the operands. This allows
1826 to combine successive conversions and bitwise binary operations. */
1827 if (CONVERT_EXPR_CODE_P (def1_code)
1828 && CONVERT_EXPR_CODE_P (def2_code)
1829 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1830 /* Make sure that the conversion widens the operands, or has same
1831 precision, or that it changes the operation to a bitfield
1832 precision. */
1833 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1834 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1835 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1836 != MODE_INT)
1837 || (TYPE_PRECISION (TREE_TYPE (arg1))
1838 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1839 {
1840 gimple newop;
1841 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1842 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1843 gimple_set_location (newop, gimple_location (stmt));
1844 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1845 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1846 tem, NULL_TREE, NULL_TREE);
1847 update_stmt (gsi_stmt (*gsi));
1848 return true;
1849 }
1850
1851
1852 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1853 if (def1_code == def2_code
1854 && def1_code == BIT_AND_EXPR
1855 && operand_equal_for_phi_arg_p (def1_arg2,
1856 def2_arg2))
1857 {
1858 tree b = def1_arg2;
1859 tree a = def1_arg1;
1860 tree c = def2_arg1;
1861 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
1862 /* If A OP0 C (this usually means C is the same as A) is 0
1863 then fold it down correctly. */
1864 if (integer_zerop (inner))
1865 {
1866 gimple_assign_set_rhs_from_tree (gsi, inner);
1867 update_stmt (stmt);
1868 return true;
1869 }
1870 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
1871 then fold it down correctly. */
1872 else if (TREE_CODE (inner) == SSA_NAME)
1873 {
1874 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
1875 inner, b);
1876 gimple_assign_set_rhs_from_tree (gsi, outer);
1877 update_stmt (stmt);
1878 return true;
1879 }
1880 else
1881 {
1882 gimple newop;
1883 tree tem;
1884 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
1885 newop = gimple_build_assign_with_ops (code, tem, a, c);
1886 gimple_set_location (newop, gimple_location (stmt));
1887 /* Make sure to re-process the new stmt as it's walking upwards. */
1888 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1889 gimple_assign_set_rhs1 (stmt, tem);
1890 gimple_assign_set_rhs2 (stmt, b);
1891 gimple_assign_set_rhs_code (stmt, def1_code);
1892 update_stmt (stmt);
1893 return true;
1894 }
1895 }
1896
1897 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1898 if (code == BIT_AND_EXPR
1899 && def1_code == BIT_IOR_EXPR
1900 && TREE_CODE (arg2) == INTEGER_CST
1901 && TREE_CODE (def1_arg2) == INTEGER_CST)
1902 {
1903 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1904 arg2, def1_arg2);
1905 tree tem;
1906 gimple newop;
1907 if (integer_zerop (cst))
1908 {
1909 gimple_assign_set_rhs1 (stmt, def1_arg1);
1910 update_stmt (stmt);
1911 return true;
1912 }
1913 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
1914 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1915 tem, def1_arg1, arg2);
1916 gimple_set_location (newop, gimple_location (stmt));
1917 /* Make sure to re-process the new stmt as it's walking upwards. */
1918 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1919 gimple_assign_set_rhs1 (stmt, tem);
1920 gimple_assign_set_rhs2 (stmt, cst);
1921 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1922 update_stmt (stmt);
1923 return true;
1924 }
1925
1926 /* Combine successive equal operations with constants. */
1927 if ((code == BIT_AND_EXPR
1928 || code == BIT_IOR_EXPR
1929 || code == BIT_XOR_EXPR)
1930 && def1_code == code
1931 && TREE_CODE (arg2) == INTEGER_CST
1932 && TREE_CODE (def1_arg2) == INTEGER_CST)
1933 {
1934 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1935 arg2, def1_arg2);
1936 gimple_assign_set_rhs1 (stmt, def1_arg1);
1937 gimple_assign_set_rhs2 (stmt, cst);
1938 update_stmt (stmt);
1939 return true;
1940 }
1941
1942 /* Canonicalize X ^ ~0 to ~X. */
1943 if (code == BIT_XOR_EXPR
1944 && TREE_CODE (arg2) == INTEGER_CST
1945 && integer_all_onesp (arg2))
1946 {
1947 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1948 gcc_assert (gsi_stmt (*gsi) == stmt);
1949 update_stmt (stmt);
1950 return true;
1951 }
1952
1953 /* Try simple folding for X op !X, and X op X. */
1954 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1955 if (res != NULL_TREE)
1956 {
1957 gimple_assign_set_rhs_from_tree (gsi, res);
1958 update_stmt (gsi_stmt (*gsi));
1959 return true;
1960 }
1961
1962 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
1963 {
1964 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
1965 if (def1_code == ocode)
1966 {
1967 tree x = arg2;
1968 enum tree_code coden;
1969 tree a1, a2;
1970 /* ( X | Y) & X -> X */
1971 /* ( X & Y) | X -> X */
1972 if (x == def1_arg1
1973 || x == def1_arg2)
1974 {
1975 gimple_assign_set_rhs_from_tree (gsi, x);
1976 update_stmt (gsi_stmt (*gsi));
1977 return true;
1978 }
1979
1980 defcodefor_name (def1_arg1, &coden, &a1, &a2);
1981 /* (~X | Y) & X -> X & Y */
1982 /* (~X & Y) | X -> X | Y */
1983 if (coden == BIT_NOT_EXPR && a1 == x)
1984 {
1985 gimple_assign_set_rhs_with_ops (gsi, code,
1986 x, def1_arg2);
1987 gcc_assert (gsi_stmt (*gsi) == stmt);
1988 update_stmt (stmt);
1989 return true;
1990 }
1991 defcodefor_name (def1_arg2, &coden, &a1, &a2);
1992 /* (Y | ~X) & X -> X & Y */
1993 /* (Y & ~X) | X -> X | Y */
1994 if (coden == BIT_NOT_EXPR && a1 == x)
1995 {
1996 gimple_assign_set_rhs_with_ops (gsi, code,
1997 x, def1_arg1);
1998 gcc_assert (gsi_stmt (*gsi) == stmt);
1999 update_stmt (stmt);
2000 return true;
2001 }
2002 }
2003 if (def2_code == ocode)
2004 {
2005 enum tree_code coden;
2006 tree a1;
2007 tree x = arg1;
2008 /* X & ( X | Y) -> X */
2009 /* X | ( X & Y) -> X */
2010 if (x == def2_arg1
2011 || x == def2_arg2)
2012 {
2013 gimple_assign_set_rhs_from_tree (gsi, x);
2014 update_stmt (gsi_stmt (*gsi));
2015 return true;
2016 }
2017 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2018 /* (~X | Y) & X -> X & Y */
2019 /* (~X & Y) | X -> X | Y */
2020 if (coden == BIT_NOT_EXPR && a1 == x)
2021 {
2022 gimple_assign_set_rhs_with_ops (gsi, code,
2023 x, def2_arg2);
2024 gcc_assert (gsi_stmt (*gsi) == stmt);
2025 update_stmt (stmt);
2026 return true;
2027 }
2028 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2029 /* (Y | ~X) & X -> X & Y */
2030 /* (Y & ~X) | X -> X | Y */
2031 if (coden == BIT_NOT_EXPR && a1 == x)
2032 {
2033 gimple_assign_set_rhs_with_ops (gsi, code,
2034 x, def2_arg1);
2035 gcc_assert (gsi_stmt (*gsi) == stmt);
2036 update_stmt (stmt);
2037 return true;
2038 }
2039 }
2040 }
2041
2042 return false;
2043 }
2044
2045
2046 /* Perform re-associations of the plus or minus statement STMT that are
2047 always permitted. Returns true if the CFG was changed. */
2048
2049 static bool
2050 associate_plusminus (gimple_stmt_iterator *gsi)
2051 {
2052 gimple stmt = gsi_stmt (*gsi);
2053 tree rhs1 = gimple_assign_rhs1 (stmt);
2054 tree rhs2 = gimple_assign_rhs2 (stmt);
2055 enum tree_code code = gimple_assign_rhs_code (stmt);
2056 bool changed;
2057
2058 /* We can't reassociate at all for saturating types. */
2059 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2060 return false;
2061
2062 /* First contract negates. */
2063 do
2064 {
2065 changed = false;
2066
2067 /* A +- (-B) -> A -+ B. */
2068 if (TREE_CODE (rhs2) == SSA_NAME)
2069 {
2070 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2071 if (is_gimple_assign (def_stmt)
2072 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2073 && can_propagate_from (def_stmt))
2074 {
2075 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2076 gimple_assign_set_rhs_code (stmt, code);
2077 rhs2 = gimple_assign_rhs1 (def_stmt);
2078 gimple_assign_set_rhs2 (stmt, rhs2);
2079 gimple_set_modified (stmt, true);
2080 changed = true;
2081 }
2082 }
2083
2084 /* (-A) + B -> B - A. */
2085 if (TREE_CODE (rhs1) == SSA_NAME
2086 && code == PLUS_EXPR)
2087 {
2088 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2089 if (is_gimple_assign (def_stmt)
2090 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2091 && can_propagate_from (def_stmt))
2092 {
2093 code = MINUS_EXPR;
2094 gimple_assign_set_rhs_code (stmt, code);
2095 rhs1 = rhs2;
2096 gimple_assign_set_rhs1 (stmt, rhs1);
2097 rhs2 = gimple_assign_rhs1 (def_stmt);
2098 gimple_assign_set_rhs2 (stmt, rhs2);
2099 gimple_set_modified (stmt, true);
2100 changed = true;
2101 }
2102 }
2103 }
2104 while (changed);
2105
2106 /* We can't reassociate floating-point or fixed-point plus or minus
2107 because of saturation to +-Inf. */
2108 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2109 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2110 goto out;
2111
2112 /* Second match patterns that allow contracting a plus-minus pair
2113 irrespective of overflow issues.
2114
2115 (A +- B) - A -> +- B
2116 (A +- B) -+ B -> A
2117 (CST +- A) +- CST -> CST +- A
2118 (A + CST) +- CST -> A + CST
2119 ~A + A -> -1
2120 ~A + 1 -> -A
2121 A - (A +- B) -> -+ B
2122 A +- (B +- A) -> +- B
2123 CST +- (CST +- A) -> CST +- A
2124 CST +- (A +- CST) -> CST +- A
2125 A + ~A -> -1
2126
2127 via commutating the addition and contracting operations to zero
2128 by reassociation. */
2129
2130 if (TREE_CODE (rhs1) == SSA_NAME)
2131 {
2132 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2133 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2134 {
2135 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2136 if (def_code == PLUS_EXPR
2137 || def_code == MINUS_EXPR)
2138 {
2139 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2140 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2141 if (operand_equal_p (def_rhs1, rhs2, 0)
2142 && code == MINUS_EXPR)
2143 {
2144 /* (A +- B) - A -> +- B. */
2145 code = ((def_code == PLUS_EXPR)
2146 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2147 rhs1 = def_rhs2;
2148 rhs2 = NULL_TREE;
2149 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2150 gcc_assert (gsi_stmt (*gsi) == stmt);
2151 gimple_set_modified (stmt, true);
2152 }
2153 else if (operand_equal_p (def_rhs2, rhs2, 0)
2154 && code != def_code)
2155 {
2156 /* (A +- B) -+ B -> A. */
2157 code = TREE_CODE (def_rhs1);
2158 rhs1 = def_rhs1;
2159 rhs2 = NULL_TREE;
2160 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2161 gcc_assert (gsi_stmt (*gsi) == stmt);
2162 gimple_set_modified (stmt, true);
2163 }
2164 else if (TREE_CODE (rhs2) == INTEGER_CST
2165 && TREE_CODE (def_rhs1) == INTEGER_CST)
2166 {
2167 /* (CST +- A) +- CST -> CST +- A. */
2168 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2169 def_rhs1, rhs2);
2170 if (cst && !TREE_OVERFLOW (cst))
2171 {
2172 code = def_code;
2173 gimple_assign_set_rhs_code (stmt, code);
2174 rhs1 = cst;
2175 gimple_assign_set_rhs1 (stmt, rhs1);
2176 rhs2 = def_rhs2;
2177 gimple_assign_set_rhs2 (stmt, rhs2);
2178 gimple_set_modified (stmt, true);
2179 }
2180 }
2181 else if (TREE_CODE (rhs2) == INTEGER_CST
2182 && TREE_CODE (def_rhs2) == INTEGER_CST
2183 && def_code == PLUS_EXPR)
2184 {
2185 /* (A + CST) +- CST -> A + CST. */
2186 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2187 def_rhs2, rhs2);
2188 if (cst && !TREE_OVERFLOW (cst))
2189 {
2190 code = PLUS_EXPR;
2191 gimple_assign_set_rhs_code (stmt, code);
2192 rhs1 = def_rhs1;
2193 gimple_assign_set_rhs1 (stmt, rhs1);
2194 rhs2 = cst;
2195 gimple_assign_set_rhs2 (stmt, rhs2);
2196 gimple_set_modified (stmt, true);
2197 }
2198 }
2199 }
2200 else if (def_code == BIT_NOT_EXPR
2201 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2202 {
2203 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2204 if (code == PLUS_EXPR
2205 && operand_equal_p (def_rhs1, rhs2, 0))
2206 {
2207 /* ~A + A -> -1. */
2208 code = INTEGER_CST;
2209 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2210 rhs2 = NULL_TREE;
2211 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2212 gcc_assert (gsi_stmt (*gsi) == stmt);
2213 gimple_set_modified (stmt, true);
2214 }
2215 else if (code == PLUS_EXPR
2216 && integer_onep (rhs1))
2217 {
2218 /* ~A + 1 -> -A. */
2219 code = NEGATE_EXPR;
2220 rhs1 = def_rhs1;
2221 rhs2 = NULL_TREE;
2222 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2223 gcc_assert (gsi_stmt (*gsi) == stmt);
2224 gimple_set_modified (stmt, true);
2225 }
2226 }
2227 }
2228 }
2229
2230 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2231 {
2232 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2233 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2234 {
2235 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2236 if (def_code == PLUS_EXPR
2237 || def_code == MINUS_EXPR)
2238 {
2239 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2240 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2241 if (operand_equal_p (def_rhs1, rhs1, 0)
2242 && code == MINUS_EXPR)
2243 {
2244 /* A - (A +- B) -> -+ B. */
2245 code = ((def_code == PLUS_EXPR)
2246 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2247 rhs1 = def_rhs2;
2248 rhs2 = NULL_TREE;
2249 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2250 gcc_assert (gsi_stmt (*gsi) == stmt);
2251 gimple_set_modified (stmt, true);
2252 }
2253 else if (operand_equal_p (def_rhs2, rhs1, 0)
2254 && code != def_code)
2255 {
2256 /* A +- (B +- A) -> +- B. */
2257 code = ((code == PLUS_EXPR)
2258 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2259 rhs1 = def_rhs1;
2260 rhs2 = NULL_TREE;
2261 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2262 gcc_assert (gsi_stmt (*gsi) == stmt);
2263 gimple_set_modified (stmt, true);
2264 }
2265 else if (TREE_CODE (rhs1) == INTEGER_CST
2266 && TREE_CODE (def_rhs1) == INTEGER_CST)
2267 {
2268 /* CST +- (CST +- A) -> CST +- A. */
2269 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2270 rhs1, def_rhs1);
2271 if (cst && !TREE_OVERFLOW (cst))
2272 {
2273 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2274 gimple_assign_set_rhs_code (stmt, code);
2275 rhs1 = cst;
2276 gimple_assign_set_rhs1 (stmt, rhs1);
2277 rhs2 = def_rhs2;
2278 gimple_assign_set_rhs2 (stmt, rhs2);
2279 gimple_set_modified (stmt, true);
2280 }
2281 }
2282 else if (TREE_CODE (rhs1) == INTEGER_CST
2283 && TREE_CODE (def_rhs2) == INTEGER_CST)
2284 {
2285 /* CST +- (A +- CST) -> CST +- A. */
2286 tree cst = fold_binary (def_code == code
2287 ? PLUS_EXPR : MINUS_EXPR,
2288 TREE_TYPE (rhs2),
2289 rhs1, def_rhs2);
2290 if (cst && !TREE_OVERFLOW (cst))
2291 {
2292 rhs1 = cst;
2293 gimple_assign_set_rhs1 (stmt, rhs1);
2294 rhs2 = def_rhs1;
2295 gimple_assign_set_rhs2 (stmt, rhs2);
2296 gimple_set_modified (stmt, true);
2297 }
2298 }
2299 }
2300 else if (def_code == BIT_NOT_EXPR
2301 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2302 {
2303 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2304 if (code == PLUS_EXPR
2305 && operand_equal_p (def_rhs1, rhs1, 0))
2306 {
2307 /* A + ~A -> -1. */
2308 code = INTEGER_CST;
2309 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2310 rhs2 = NULL_TREE;
2311 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2312 gcc_assert (gsi_stmt (*gsi) == stmt);
2313 gimple_set_modified (stmt, true);
2314 }
2315 }
2316 }
2317 }
2318
2319 out:
2320 if (gimple_modified_p (stmt))
2321 {
2322 fold_stmt_inplace (gsi);
2323 update_stmt (stmt);
2324 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2325 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2326 return true;
2327 }
2328
2329 return false;
2330 }
2331
2332 /* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2333 true if anything changed, false otherwise. */
2334
2335 static bool
2336 associate_pointerplus (gimple_stmt_iterator *gsi)
2337 {
2338 gimple stmt = gsi_stmt (*gsi);
2339 gimple def_stmt;
2340 tree ptr, rhs, algn;
2341
2342 /* Pattern match
2343 tem = (sizetype) ptr;
2344 tem = tem & algn;
2345 tem = -tem;
2346 ... = ptr p+ tem;
2347 and produce the simpler and easier to analyze with respect to alignment
2348 ... = ptr & ~algn; */
2349 ptr = gimple_assign_rhs1 (stmt);
2350 rhs = gimple_assign_rhs2 (stmt);
2351 if (TREE_CODE (rhs) != SSA_NAME)
2352 return false;
2353 def_stmt = SSA_NAME_DEF_STMT (rhs);
2354 if (!is_gimple_assign (def_stmt)
2355 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2356 return false;
2357 rhs = gimple_assign_rhs1 (def_stmt);
2358 if (TREE_CODE (rhs) != SSA_NAME)
2359 return false;
2360 def_stmt = SSA_NAME_DEF_STMT (rhs);
2361 if (!is_gimple_assign (def_stmt)
2362 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2363 return false;
2364 rhs = gimple_assign_rhs1 (def_stmt);
2365 algn = gimple_assign_rhs2 (def_stmt);
2366 if (TREE_CODE (rhs) != SSA_NAME
2367 || TREE_CODE (algn) != INTEGER_CST)
2368 return false;
2369 def_stmt = SSA_NAME_DEF_STMT (rhs);
2370 if (!is_gimple_assign (def_stmt)
2371 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2372 return false;
2373 if (gimple_assign_rhs1 (def_stmt) != ptr)
2374 return false;
2375
2376 algn = double_int_to_tree (TREE_TYPE (ptr),
2377 double_int_not (tree_to_double_int (algn)));
2378 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2379 fold_stmt_inplace (gsi);
2380 update_stmt (stmt);
2381
2382 return true;
2383 }
2384
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  /* Callers only dispatch conversion-like codes here.  */
  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  /* A conversion to an equivalent type is a plain copy.  */
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      /* Classify the three types involved: inside (innermost source),
	 inter (the intermediate conversion result) and final (LHS).  */
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
	return 0;

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  /* 2 when removing the feeding conversion required cfg cleanup
	     (see the function comment), 1 otherwise.  */
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
	   || (inter_float && inside_float)
	   || (inter_vec && inside_vec))
	  && inter_prec >= inside_prec
	  && (inter_float || inter_vec
	      || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type))
	  && ! final_ptr
	  && (! final_vec || inter_prec == inside_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  Likewise if the
	 final conversion does not change precision we can drop the
	 intermediate conversion.  */
      if (inside_int && inter_int && final_int
	  && ((inside_prec < inter_prec && inter_prec < final_prec
	       && inside_unsignedp && !inter_unsignedp)
	      || final_prec == inter_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  /* Mask off the low inter_prec bits in the inside type.  */
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     double_int_to_tree
			       (inside_type, double_int_mask (inter_prec)));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return 1;
	}

      /* If we are converting an integer to a floating-point that can
	 represent it exactly and back to an integer, we can skip the
	 floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
	  (unsigned) significand_size (TYPE_MODE (inter_type))
	  >= inside_prec - !inside_unsignedp)
	{
	  if (useless_type_conversion_p (type, inside_type))
	    {
	      gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	      gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	  else
	    {
	      gimple_assign_set_rhs1 (stmt, defop0);
	      gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	}
    }

  return 0;
}
2579
/* Main entry point for the forward propagation and statement combine
   optimizer.  Runs two sweeps over each basic block: first forward
   propagation of addresses and comparisons, then a combine phase that
   simplifies statements using their operand definitions, revisiting
   statements as needed.  Returns TODO_* flags for the pass manager.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  /* Nothing to propagate from a dead or non-SSA definition.  */
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs))
		{
		  /* All uses were replaced; the definition is dead.  */
		  release_defs (stmt);
		  todoflags |= TODO_remove_unused_locals;
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off)))))
		{
		  release_defs (stmt);
		  todoflags |= TODO_remove_unused_locals;
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  /* Only advance if folding did not turn the stmt into
		     something else worth reconsidering.  */
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      if (forward_propagate_comparison (&gsi))
		cfg_changed = true;
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Dispatch on the statement kind to the matching combiner.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if ((code == BIT_NOT_EXPR
		     || code == NEGATE_EXPR)
		    && TREE_CODE (rhs1) == SSA_NAME)
		  changed = simplify_not_neg_expr (&gsi);
		else if (code == COND_EXPR
			 || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1. */
		    if (forward_propagate_into_cond (&gsi)
			|| combine_cond_exprs (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_AND_EXPR
			 || code == BIT_IOR_EXPR
			 || code == BIT_XOR_EXPR)
		  changed = simplify_bitwise_binary (&gsi);
		else if (code == PLUS_EXPR
			 || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		else if (code == POINTER_PLUS_EXPR)
		  changed = associate_pointerplus (&gsi);
		else if (CONVERT_EXPR_CODE_P (code)
			 || code == FLOAT_EXPR
			 || code == FIX_TRUNC_EXPR)
		  {
		    int did_something = combine_conversions (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (stmt);
	      break;

	    case GIMPLE_COND:
	      {
		int did_something;
		did_something = forward_propagate_into_gimple_cond (stmt);
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  Walk back to the first statement
		 whose GF_PLF_1 flag says it was already processed.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);
	      gsi_next (&gsi);
	    }
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
2797
2798
2799 static bool
2800 gate_forwprop (void)
2801 {
2802 return flag_tree_forwprop;
2803 }
2804
/* Pass descriptor for the GIMPLE forward propagation pass, registered
   with the pass manager.  Requires CFG and SSA form; finishes by
   updating and verifying SSA.  */

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  ssa_forward_propagate_and_combine,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};