/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is of integral type and x is boolean, or x is of
   integral type and a is boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such statement.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, it is set to the SSA name that
   represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
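
/* A sketch of the above, with made-up SSA names: given the single-use
   chain

     a_1 = b_2;
     if (a_1 != 0) goto ...; else goto ...;

   get_prop_dest_stmt (b_2, &n) skips the trivial copy, returns the
   GIMPLE_COND and sets n to a_1.  */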

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such statement.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
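
/* Conversely, a sketch for get_prop_source_stmt (made-up names): for

     b_2 = a_1 < 0;
     c_3 = b_2;

   get_prop_source_stmt (c_3, false, &p) walks back through the trivial
   copy and returns the definition of b_2; pointer conversions are
   looked through the same way.  */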

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 in STMT with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons try the first operand first; that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
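
/* For instance (a hypothetical example), this combines

     a_1 = b_2 + 1;
     c_3 = a_1 == 5;

   into c_3 = b_2 == 4, after which remove_prop_source_from_use cleans
   up the then-dead addition.  */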

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
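
/* The _Bool canonicalization above rewrites, e.g. (sketch),

     if (b_1 == 0) goto L1; else goto L2;

   into

     if (b_1 != 0) goto L2; else goto L1;

   by toggling EDGE_TRUE_VALUE/EDGE_FALSE_VALUE on both successor edges
   rather than touching the definition of b_1.  */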


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               boolean_type_node,
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               code,
                               boolean_type_node,
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if ((code == BIT_NOT_EXPR
                && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
               || (code == BIT_XOR_EXPR
                   && integer_onep (gimple_assign_rhs2 (def_stmt))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if (integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
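
/* An example of the swap handling above (made-up names): with a
   one-bit b_2,

     x_1 = ~b_2;
     r_3 = x_1 ? v_4 : v_5;

   becomes r_3 = b_2 ? v_5 : v_4, absorbing the negation by exchanging
   the two arms.  */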

/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
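
/* A sketch of what this catches (made-up names):

     t_1 = c_2 ? a_3 : b_4;
     r_5 = c_2 ? t_1 : d_6;

   The first arm collapses to a_3, giving r_5 = c_2 ? a_3 : d_6; if both
   arms then compare equal, the whole COND_EXPR becomes a plain copy.  */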

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  fold_stmt (use_stmt_gsi);
  tidy_after_forward_propagate_addr (gsi_stmt (*use_stmt_gsi));
  return true;
}
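
/* E.g. (hypothetical GIMPLE), for an array of 4-byte elements:

     off_1 = i_2 * 4;
     p_3 = &a[0] + off_1;

   is rewritten into p_3 = &a[i_2] once the multiplier is verified to
   match TYPE_SIZE_UNIT of the element type.  */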

/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && integer_zerop (TREE_OPERAND (lhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_lhs (use_stmt, new_lhs);
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && integer_zerop (TREE_OPERAND (rhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_rhs1 (use_stmt, new_rhs);
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}


/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
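
/* For illustration (made-up names), the pair

     c_1 = a_2 < b_3;
     d_4 = ~c_1;

   with one-bit c_1 is rewritten into d_4 = a_2 >= b_3; for floating
   point operands invert_tree_comparison returns ERROR_MARK when NaNs
   would make the inversion invalid, and we bail out.  */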


/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
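
/* A minimal example of the backward walk (made-up names):

     x_1 = ~y_2;
     z_3 = ~x_1;

   The second statement becomes z_3 = y_2, which copy propagation can
   then eliminate.  Negation is handled identically.  */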

/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  VEC(tree, heap) *labels = VEC_alloc (tree, heap, branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    VEC_quick_push (tree, labels, gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = VEC_length (tree, labels);
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          VEC_quick_push (tree, labels, elt);
          len = 1;
        }

      for (i = 0; i < VEC_length (tree, labels); i++)
        gimple_switch_set_label (stmt, i + 1, VEC_index (tree, labels, i));
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  VEC_free (tree, heap, labels);
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
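
/* For instance (sketch), if i_1 has type unsigned char in

     w_2 = (int) i_1;
     switch (w_2) ...

   the widening cast preserves every value, so the switch can test i_1
   directly, and simplify_gimple_switch_label_vec then drops any labels
   outside the range of unsigned char.  */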

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
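
/* A sketch of the walk above (made-up names): given char buf[16] and

     p1_1 = &buf[0];
     p2_2 = p1_1 + 4;

   constant_pointer_difference (p1_1, p2_2) finds buf as a common base
   of both chains and returns 4; chains with no common base yield
   NULL_TREE.  */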

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);
   into
     memcpy (p, "abcd   ", 7);
   if the latter can be stored by pieces during expansion.  */
1555
1556 static bool
1557 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1558 {
1559 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1560 tree vuse = gimple_vuse (stmt2);
1561 if (vuse == NULL)
1562 return false;
1563 stmt1 = SSA_NAME_DEF_STMT (vuse);
1564
1565 switch (DECL_FUNCTION_CODE (callee2))
1566 {
1567 case BUILT_IN_MEMSET:
1568 if (gimple_call_num_args (stmt2) != 3
1569 || gimple_call_lhs (stmt2)
1570 || CHAR_BIT != 8
1571 || BITS_PER_UNIT != 8)
1572 break;
1573 else
1574 {
1575 tree callee1;
1576 tree ptr1, src1, str1, off1, len1, lhs1;
1577 tree ptr2 = gimple_call_arg (stmt2, 0);
1578 tree val2 = gimple_call_arg (stmt2, 1);
1579 tree len2 = gimple_call_arg (stmt2, 2);
1580 tree diff, vdef, new_str_cst;
1581 gimple use_stmt;
1582 unsigned int ptr1_align;
1583 unsigned HOST_WIDE_INT src_len;
1584 char *src_buf;
1585 use_operand_p use_p;
1586
1587 if (!host_integerp (val2, 0)
1588 || !host_integerp (len2, 1))
1589 break;
1590 if (is_gimple_call (stmt1))
1591 {
1592 /* If first stmt is a call, it needs to be memcpy
1593 or mempcpy, with string literal as second argument and
1594 constant length. */
1595 callee1 = gimple_call_fndecl (stmt1);
1596 if (callee1 == NULL_TREE
1597 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1598 || gimple_call_num_args (stmt1) != 3)
1599 break;
1600 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1601 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1602 break;
1603 ptr1 = gimple_call_arg (stmt1, 0);
1604 src1 = gimple_call_arg (stmt1, 1);
1605 len1 = gimple_call_arg (stmt1, 2);
1606 lhs1 = gimple_call_lhs (stmt1);
1607 if (!host_integerp (len1, 1))
1608 break;
1609 str1 = string_constant (src1, &off1);
1610 if (str1 == NULL_TREE)
1611 break;
1612 if (!host_integerp (off1, 1)
1613 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1614 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1615 - tree_low_cst (off1, 1)) > 0
1616 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1617 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1618 != TYPE_MODE (char_type_node))
1619 break;
1620 }
1621 else if (gimple_assign_single_p (stmt1))
1622 {
1623 /* Otherwise look for length 1 memcpy optimized into
1624 assignment. */
1625 ptr1 = gimple_assign_lhs (stmt1);
1626 src1 = gimple_assign_rhs1 (stmt1);
1627 if (TREE_CODE (ptr1) != MEM_REF
1628 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1629 || !host_integerp (src1, 0))
1630 break;
1631 ptr1 = build_fold_addr_expr (ptr1);
1632 callee1 = NULL_TREE;
1633 len1 = size_one_node;
1634 lhs1 = NULL_TREE;
1635 off1 = size_zero_node;
1636 str1 = NULL_TREE;
1637 }
1638 else
1639 break;
1640
1641 diff = constant_pointer_difference (ptr1, ptr2);
1642 if (diff == NULL && lhs1 != NULL)
1643 {
1644 diff = constant_pointer_difference (lhs1, ptr2);
1645 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1646 && diff != NULL)
1647 diff = size_binop (PLUS_EXPR, diff,
1648 fold_convert (sizetype, len1));
1649 }
1650 /* If the difference between the second and first destination pointer
1651 is not constant, or is bigger than memcpy length, bail out. */
1652 if (diff == NULL
1653 || !host_integerp (diff, 1)
1654 || tree_int_cst_lt (len1, diff))
1655 break;
1656
1657 /* Use maximum of difference plus memset length and memcpy length
1658 as the new memcpy length, if it is too big, bail out. */
1659 src_len = tree_low_cst (diff, 1);
1660 src_len += tree_low_cst (len2, 1);
1661 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1662 src_len = tree_low_cst (len1, 1);
1663 if (src_len > 1024)
1664 break;
1665
1666 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1667 with bigger length will return different result. */
1668 if (lhs1 != NULL_TREE
1669 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1670 && (TREE_CODE (lhs1) != SSA_NAME
1671 || !single_imm_use (lhs1, &use_p, &use_stmt)
1672 || use_stmt != stmt2))
1673 break;
1674
1675 /* If anything reads memory in between memcpy and memset
1676 call, the modified memcpy call might change it. */
1677 vdef = gimple_vdef (stmt1);
1678 if (vdef != NULL
1679 && (!single_imm_use (vdef, &use_p, &use_stmt)
1680 || use_stmt != stmt2))
1681 break;
1682
1683 ptr1_align = get_pointer_alignment (ptr1);
1684 /* Construct the new source string literal. */
1685 src_buf = XALLOCAVEC (char, src_len + 1);
1686 if (callee1)
1687 memcpy (src_buf,
1688 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1689 tree_low_cst (len1, 1));
1690 else
1691 src_buf[0] = tree_low_cst (src1, 0);
1692 memset (src_buf + tree_low_cst (diff, 1),
1693 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1694 src_buf[src_len] = '\0';
1695 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1696 handle embedded '\0's. */
1697 if (strlen (src_buf) != src_len)
1698 break;
1699 rtl_profile_for_bb (gimple_bb (stmt2));
1700 /* If the new memcpy wouldn't be emitted by storing the literal
1701 by pieces, this optimization might enlarge .rodata too much,
1702 as commonly used string literals couldn't be shared any
1703 longer. */
1704 if (!can_store_by_pieces (src_len,
1705 builtin_strncpy_read_str,
1706 src_buf, ptr1_align, false))
1707 break;
1708
1709 new_str_cst = build_string_literal (src_len, src_buf);
1710 if (callee1)
1711 {
1715 1712 /* If STMT1 is a mem{,p}cpy call, adjust it and remove the
1716 1713 memset call. */
1714 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1715 gimple_call_set_lhs (stmt1, NULL_TREE);
1716 gimple_call_set_arg (stmt1, 1, new_str_cst);
1717 gimple_call_set_arg (stmt1, 2,
1718 build_int_cst (TREE_TYPE (len1), src_len));
1719 update_stmt (stmt1);
1720 unlink_stmt_vdef (stmt2);
1721 gsi_remove (gsi_p, true);
1722 release_defs (stmt2);
1723 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1724 release_ssa_name (lhs1);
1725 return true;
1726 }
1727 else
1728 {
1732 1729 /* Otherwise, if STMT1 is a length-1 memcpy optimized into an
1733 1730 assignment, remove STMT1 and change the memset call into
1734 1731 a memcpy call. */
1732 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1733
1734 if (!is_gimple_val (ptr1))
1735 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1736 true, GSI_SAME_STMT);
1737 gimple_call_set_fndecl (stmt2,
1738 builtin_decl_explicit (BUILT_IN_MEMCPY));
1739 gimple_call_set_arg (stmt2, 0, ptr1);
1740 gimple_call_set_arg (stmt2, 1, new_str_cst);
1741 gimple_call_set_arg (stmt2, 2,
1742 build_int_cst (TREE_TYPE (len2), src_len));
1743 unlink_stmt_vdef (stmt1);
1744 gsi_remove (&gsi, true);
1745 release_defs (stmt1);
1746 update_stmt (stmt2);
1747 return false;
1748 }
1749 }
1750 break;
1751 default:
1752 break;
1753 }
1754 return false;
1755 }
1756
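/* For reference, the memcpy/memset merging above amounts to the
   following source-level rewrite (an illustrative sketch; it assumes
   nothing reads the destination in between and that the combined
   literal can be stored by pieces):

     memcpy (p, "abcd", 4);
     memset (p + 2, 'x', 3);   ->   memcpy (p, "abxxx", 5);  */
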
1760 1757 /* Check whether the expression NAME has a type of one-bit precision,
1761 1758 or is a known truth-valued expression. */
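/* For example (illustrative), _1 in
     _1 = a_2 != 0;
   is truth-valued even if the type of _1 is int, since a comparison
   only yields 0 or 1. */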
1759 static bool
1760 truth_valued_ssa_name (tree name)
1761 {
1762 gimple def;
1763 tree type = TREE_TYPE (name);
1764
1765 if (!INTEGRAL_TYPE_P (type))
1766 return false;
1767 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1768 necessarily one and so ~X is not equal to !X. */
1769 if (TYPE_PRECISION (type) == 1)
1770 return true;
1771 def = SSA_NAME_DEF_STMT (name);
1772 if (is_gimple_assign (def))
1773 return truth_value_p (gimple_assign_rhs_code (def));
1774 return false;
1775 }
1776
1780 1777 /* Helper routine for the simplify_bitwise_binary_1 function.
1781 1778 Return for the SSA name NAME the expression X if it meets the condition
1782 1779 NAME = !X. Otherwise return NULL_TREE.
1783 1780 Detected patterns for NAME = !X are:
1784 1781 !X and X == 0 for X with integral type.
1785 1782 X ^ 1, X != 1, or ~X for X with integral type with precision of one. */
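/* For example (illustrative), given
     y_2 = x_1 == 0;
   lookup_logical_inverted_value (y_2) returns x_1. */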
1783 static tree
1784 lookup_logical_inverted_value (tree name)
1785 {
1786 tree op1, op2;
1787 enum tree_code code;
1788 gimple def;
1789
1793 1790 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1794 1791 return. */
1792 if (TREE_CODE (name) != SSA_NAME
1793 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1794 return NULL_TREE;
1795 def = SSA_NAME_DEF_STMT (name);
1796 if (!is_gimple_assign (def))
1797 return NULL_TREE;
1798
1799 code = gimple_assign_rhs_code (def);
1800 op1 = gimple_assign_rhs1 (def);
1801 op2 = NULL_TREE;
1802
1806 1803 /* Fetch the second operand for an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR
1807 1804 operation. Codes other than these and BIT_NOT_EXPR return NULL_TREE below. */
1805 if (code == EQ_EXPR || code == NE_EXPR
1806 || code == BIT_XOR_EXPR)
1807 op2 = gimple_assign_rhs2 (def);
1808
1809 switch (code)
1810 {
1811 case BIT_NOT_EXPR:
1812 if (truth_valued_ssa_name (name))
1813 return op1;
1814 break;
1815 case EQ_EXPR:
1816 /* Check if we have X == 0 and X has an integral type. */
1817 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1818 break;
1819 if (integer_zerop (op2))
1820 return op1;
1821 break;
1822 case NE_EXPR:
1826 1823 /* Check if we have X != 1 and X is truth-valued. */
1824 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1825 break;
1826 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1827 return op1;
1828 break;
1829 case BIT_XOR_EXPR:
1830 /* Check if we have X ^ 1 and X is truth valued. */
1831 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1832 return op1;
1833 break;
1834 default:
1835 break;
1836 }
1837
1838 return NULL_TREE;
1839 }
1840
1844 1841 /* Optimize ARG1 CODE ARG2 to a constant for a bitwise binary
1845 1842 operation CODE, if one operand is the logically inverted value
1846 1843 of the other. */
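/* For example (illustrative), given
     b_1 = a_2 == 0;
     t_3 = b_1 & a_2;
   the returned constant for t_3 is 0; for t_3 = b_1 | a_2 it is 1,
   provided a_2 is truth-valued. */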
1844 static tree
1845 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1846 tree arg1, tree arg2)
1847 {
1848 tree anot;
1849
1850 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1851 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1852 && code != BIT_XOR_EXPR)
1853 return NULL_TREE;
1854
1858 1855 /* First check if operands ARG1 and ARG2 are equal. If so
1859 1856 return NULL_TREE as this optimization is handled by fold_stmt. */
1857 if (arg1 == arg2)
1858 return NULL_TREE;
1859 /* See if we have in arguments logical-not patterns. */
1860 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1861 || anot != arg2)
1862 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1863 || anot != arg1))
1864 return NULL_TREE;
1865
1866 /* X & !X -> 0. */
1867 if (code == BIT_AND_EXPR)
1868 return fold_convert (type, integer_zero_node);
1869 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1870 if (truth_valued_ssa_name (anot))
1871 return fold_convert (type, integer_one_node);
1872
1876 1873 /* ??? Otherwise the result is (X != 0 ? X : 1), which is not handled. */
1874 return NULL_TREE;
1875 }
1876
1880 1877 /* Given an SSA name NAME, see if it was defined by an assignment and,
1881 1878 if so, set CODE to the code of the RHS and ARG1 and ARG2 to its
1882 1879 first and second operands. */
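/* For example (illustrative), for NAME == _1 with
     _1 = a_2 + 3;
   this sets *CODE to PLUS_EXPR, *ARG1 to a_2 and *ARG2 to 3. */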
1880
1881 static inline void
1882 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1883 {
1884 gimple def;
1885 enum tree_code code1;
1886 tree arg11;
1887 tree arg21;
1888 tree arg31;
1889 enum gimple_rhs_class grhs_class;
1890
1891 code1 = TREE_CODE (name);
1892 arg11 = name;
1893 arg21 = NULL_TREE;
1894 grhs_class = get_gimple_rhs_class (code1);
1895
1896 if (code1 == SSA_NAME)
1897 {
1898 def = SSA_NAME_DEF_STMT (name);
1899
1900 if (def && is_gimple_assign (def)
1901 && can_propagate_from (def))
1902 {
1903 code1 = gimple_assign_rhs_code (def);
1904 arg11 = gimple_assign_rhs1 (def);
1905 arg21 = gimple_assign_rhs2 (def);
1909 1906 arg31 = gimple_assign_rhs3 (def);
1907 }
1908 }
1912 1909 else if (grhs_class == GIMPLE_TERNARY_RHS
1913 1910 || grhs_class == GIMPLE_BINARY_RHS
1914 1911 || grhs_class == GIMPLE_UNARY_RHS
1915 1912 || grhs_class == GIMPLE_SINGLE_RHS)
1913 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1914
1915 *code = code1;
1916 *arg1 = arg11;
1917 if (arg2)
1918 *arg2 = arg21;
1919 /* Ignore arg3 currently. */
1920 }
1921
1922 /* Simplify bitwise binary operations.
1923 Return true if a transformation applied, otherwise return false. */
1924
1925 static bool
1926 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1927 {
1928 gimple stmt = gsi_stmt (*gsi);
1929 tree arg1 = gimple_assign_rhs1 (stmt);
1930 tree arg2 = gimple_assign_rhs2 (stmt);
1931 enum tree_code code = gimple_assign_rhs_code (stmt);
1932 tree res;
1933 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1934 enum tree_code def1_code, def2_code;
1935
1936 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1937 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1938
1942 1939 /* Try to fold (type) X op CST -> (type) (X op ((type of X) CST)). */
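/* For example (an illustrative sketch, with c_2 of type char):
     _1 = (int) c_2;              _3 = c_2 & 5;
     _4 = _1 & 5;            ->   _4 = (int) _3;
   provided the constant fits in the type of c_2. */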
1940 if (TREE_CODE (arg2) == INTEGER_CST
1941 && CONVERT_EXPR_CODE_P (def1_code)
1942 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1943 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1944 {
1945 gimple newop;
1946 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1947 newop =
1948 gimple_build_assign_with_ops (code, tem, def1_arg1,
1949 fold_convert_loc (gimple_location (stmt),
1950 TREE_TYPE (def1_arg1),
1951 arg2));
1952 tem = make_ssa_name (tem, newop);
1953 gimple_assign_set_lhs (newop, tem);
1954 gimple_set_location (newop, gimple_location (stmt));
1955 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1956 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1957 tem, NULL_TREE, NULL_TREE);
1958 update_stmt (gsi_stmt (*gsi));
1959 return true;
1960 }
1961
1965 1962 /* For bitwise binary operations apply operand conversions to the
1966 1963 binary operation result instead of to the operands. This allows
1967 1964 successive conversions and bitwise binary operations to be combined. */
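/* For example (an illustrative sketch):
     _1 = (unsigned int) a_2;          _3 = a_2 & b_4;
     _2 = (unsigned int) b_4;     ->   _5 = (unsigned int) _3;
     _5 = _1 & _2;
   when a_2 and b_4 have compatible types. */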
1965 if (CONVERT_EXPR_CODE_P (def1_code)
1966 && CONVERT_EXPR_CODE_P (def2_code)
1967 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1971 1968 /* Make sure that the conversion widens the operands, or keeps the
1972 1969 same precision, or that the operation is carried out in a
1973 1970 bitfield precision. */
1971 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1972 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1973 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1974 != MODE_INT)
1975 || (TYPE_PRECISION (TREE_TYPE (arg1))
1976 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1977 {
1978 gimple newop;
1979 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1980 NULL);
1981 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1982 tem = make_ssa_name (tem, newop);
1983 gimple_assign_set_lhs (newop, tem);
1984 gimple_set_location (newop, gimple_location (stmt));
1985 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1986 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1987 tem, NULL_TREE, NULL_TREE);
1988 update_stmt (gsi_stmt (*gsi));
1989 return true;
1990 }
1991
1992
1993 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
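/* For example (an illustrative sketch):
     _1 = a_2 & m_3;
     _4 = c_5 & m_3;          _6 = a_2 | c_5;
     _7 = _1 | _4;       ->   _7 = _6 & m_3;  */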
1994 if (def1_code == def2_code
1995 && def1_code == BIT_AND_EXPR
1996 && operand_equal_for_phi_arg_p (def1_arg2,
1997 def2_arg2))
1998 {
1999 tree b = def1_arg2;
2000 tree a = def1_arg1;
2001 tree c = def2_arg1;
2002 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2006 2003 /* If A OP0 C folds to the constant 0 (this usually means C is
2007 2004 the same as A), use the constant directly. */
2005 if (integer_zerop (inner))
2006 {
2007 gimple_assign_set_rhs_from_tree (gsi, inner);
2008 update_stmt (stmt);
2009 return true;
2010 }
2014 2011 /* If A OP0 C folds to an existing ssa_name (this usually means C
2015 2012 is the same as A), rebuild the outer operation on top of it. */
2013 else if (TREE_CODE (inner) == SSA_NAME)
2014 {
2015 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2016 inner, b);
2017 gimple_assign_set_rhs_from_tree (gsi, outer);
2018 update_stmt (stmt);
2019 return true;
2020 }
2021 else
2022 {
2023 gimple newop;
2024 tree tem;
2025 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
2026 newop = gimple_build_assign_with_ops (code, tem, a, c);
2027 tem = make_ssa_name (tem, newop);
2028 gimple_assign_set_lhs (newop, tem);
2029 gimple_set_location (newop, gimple_location (stmt));
2033 2030 /* Make sure to re-process the new stmt as we are walking upwards. */
2031 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2032 gimple_assign_set_rhs1 (stmt, tem);
2033 gimple_assign_set_rhs2 (stmt, b);
2034 gimple_assign_set_rhs_code (stmt, def1_code);
2035 update_stmt (stmt);
2036 return true;
2037 }
2038 }
2039
2040 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
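/* For example (illustrative), (a | 0xf0) & 0x3c becomes
   (a & 0x3c) | 0x30, since 0xf0 & 0x3c == 0x30. */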
2041 if (code == BIT_AND_EXPR
2042 && def1_code == BIT_IOR_EXPR
2043 && TREE_CODE (arg2) == INTEGER_CST
2044 && TREE_CODE (def1_arg2) == INTEGER_CST)
2045 {
2046 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2047 arg2, def1_arg2);
2048 tree tem;
2049 gimple newop;
2050 if (integer_zerop (cst))
2051 {
2052 gimple_assign_set_rhs1 (stmt, def1_arg1);
2053 update_stmt (stmt);
2054 return true;
2055 }
2056 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
2057 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2058 tem, def1_arg1, arg2);
2059 tem = make_ssa_name (tem, newop);
2060 gimple_assign_set_lhs (newop, tem);
2061 gimple_set_location (newop, gimple_location (stmt));
2065 2062 /* Make sure to re-process the new stmt as we are walking upwards. */
2063 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2064 gimple_assign_set_rhs1 (stmt, tem);
2065 gimple_assign_set_rhs2 (stmt, cst);
2066 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2067 update_stmt (stmt);
2068 return true;
2069 }
2070
2071 /* Combine successive equal operations with constants. */
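/* For example (illustrative), (a & 0xff) & 0x0f becomes a & 0x0f,
   and (a ^ 3) ^ 5 becomes a ^ 6. */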
2072 if ((code == BIT_AND_EXPR
2073 || code == BIT_IOR_EXPR
2074 || code == BIT_XOR_EXPR)
2075 && def1_code == code
2076 && TREE_CODE (arg2) == INTEGER_CST
2077 && TREE_CODE (def1_arg2) == INTEGER_CST)
2078 {
2079 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2080 arg2, def1_arg2);
2081 gimple_assign_set_rhs1 (stmt, def1_arg1);
2082 gimple_assign_set_rhs2 (stmt, cst);
2083 update_stmt (stmt);
2084 return true;
2085 }
2086
2087 /* Canonicalize X ^ ~0 to ~X. */
2088 if (code == BIT_XOR_EXPR
2089 && TREE_CODE (arg2) == INTEGER_CST
2090 && integer_all_onesp (arg2))
2091 {
2092 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2093 gcc_assert (gsi_stmt (*gsi) == stmt);
2094 update_stmt (stmt);
2095 return true;
2096 }
2097
2101 2098 /* Try simple folding for X op !X; the X op X case is left to fold_stmt. */
2099 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2100 if (res != NULL_TREE)
2101 {
2102 gimple_assign_set_rhs_from_tree (gsi, res);
2103 update_stmt (gsi_stmt (*gsi));
2104 return true;
2105 }
2106
2107 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2108 {
2109 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2110 if (def1_code == ocode)
2111 {
2112 tree x = arg2;
2113 enum tree_code coden;
2114 tree a1, a2;
2115 /* ( X | Y) & X -> X */
2116 /* ( X & Y) | X -> X */
2117 if (x == def1_arg1
2118 || x == def1_arg2)
2119 {
2120 gimple_assign_set_rhs_from_tree (gsi, x);
2121 update_stmt (gsi_stmt (*gsi));
2122 return true;
2123 }
2124
2125 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2126 /* (~X | Y) & X -> X & Y */
2127 /* (~X & Y) | X -> X | Y */
2128 if (coden == BIT_NOT_EXPR && a1 == x)
2129 {
2130 gimple_assign_set_rhs_with_ops (gsi, code,
2131 x, def1_arg2);
2132 gcc_assert (gsi_stmt (*gsi) == stmt);
2133 update_stmt (stmt);
2134 return true;
2135 }
2136 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2137 /* (Y | ~X) & X -> X & Y */
2138 /* (Y & ~X) | X -> X | Y */
2139 if (coden == BIT_NOT_EXPR && a1 == x)
2140 {
2141 gimple_assign_set_rhs_with_ops (gsi, code,
2142 x, def1_arg1);
2143 gcc_assert (gsi_stmt (*gsi) == stmt);
2144 update_stmt (stmt);
2145 return true;
2146 }
2147 }
2148 if (def2_code == ocode)
2149 {
2150 enum tree_code coden;
2151 tree a1;
2152 tree x = arg1;
2153 /* X & ( X | Y) -> X */
2154 /* X | ( X & Y) -> X */
2155 if (x == def2_arg1
2156 || x == def2_arg2)
2157 {
2158 gimple_assign_set_rhs_from_tree (gsi, x);
2159 update_stmt (gsi_stmt (*gsi));
2160 return true;
2161 }
2162 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2166 2163 /* X & (~X | Y) -> X & Y */
2167 2164 /* X | (~X & Y) -> X | Y */
2165 if (coden == BIT_NOT_EXPR && a1 == x)
2166 {
2167 gimple_assign_set_rhs_with_ops (gsi, code,
2168 x, def2_arg2);
2169 gcc_assert (gsi_stmt (*gsi) == stmt);
2170 update_stmt (stmt);
2171 return true;
2172 }
2173 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2177 2174 /* X & (Y | ~X) -> X & Y */
2178 2175 /* X | (Y & ~X) -> X | Y */
2176 if (coden == BIT_NOT_EXPR && a1 == x)
2177 {
2178 gimple_assign_set_rhs_with_ops (gsi, code,
2179 x, def2_arg1);
2180 gcc_assert (gsi_stmt (*gsi) == stmt);
2181 update_stmt (stmt);
2182 return true;
2183 }
2184 }
2185 }
2186
2187 return false;
2188 }
2189
2190
2191 /* Perform re-associations of the plus or minus statement STMT that are
2192 always permitted. Returns true if the CFG was changed. */
2193
2194 static bool
2195 associate_plusminus (gimple_stmt_iterator *gsi)
2196 {
2197 gimple stmt = gsi_stmt (*gsi);
2198 tree rhs1 = gimple_assign_rhs1 (stmt);
2199 tree rhs2 = gimple_assign_rhs2 (stmt);
2200 enum tree_code code = gimple_assign_rhs_code (stmt);
2201 bool changed;
2202
2203 /* We can't reassociate at all for saturating types. */
2204 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2205 return false;
2206
2207 /* First contract negates. */
2208 do
2209 {
2210 changed = false;
2211
2212 /* A +- (-B) -> A -+ B. */
2213 if (TREE_CODE (rhs2) == SSA_NAME)
2214 {
2215 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2216 if (is_gimple_assign (def_stmt)
2217 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2218 && can_propagate_from (def_stmt))
2219 {
2220 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2221 gimple_assign_set_rhs_code (stmt, code);
2222 rhs2 = gimple_assign_rhs1 (def_stmt);
2223 gimple_assign_set_rhs2 (stmt, rhs2);
2224 gimple_set_modified (stmt, true);
2225 changed = true;
2226 }
2227 }
2228
2229 /* (-A) + B -> B - A. */
2230 if (TREE_CODE (rhs1) == SSA_NAME
2231 && code == PLUS_EXPR)
2232 {
2233 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2234 if (is_gimple_assign (def_stmt)
2235 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2236 && can_propagate_from (def_stmt))
2237 {
2238 code = MINUS_EXPR;
2239 gimple_assign_set_rhs_code (stmt, code);
2240 rhs1 = rhs2;
2241 gimple_assign_set_rhs1 (stmt, rhs1);
2242 rhs2 = gimple_assign_rhs1 (def_stmt);
2243 gimple_assign_set_rhs2 (stmt, rhs2);
2244 gimple_set_modified (stmt, true);
2245 changed = true;
2246 }
2247 }
2248 }
2249 while (changed);
2250
2251 /* We can't reassociate floating-point or fixed-point plus or minus
2252 because of saturation to +-Inf. */
2253 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2254 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2255 goto out;
2256
2257 /* Second match patterns that allow contracting a plus-minus pair
2258 irrespective of overflow issues.
2259
2260 (A +- B) - A -> +- B
2261 (A +- B) -+ B -> A
2262 (CST +- A) +- CST -> CST +- A
2263 (A + CST) +- CST -> A + CST
2264 ~A + A -> -1
2265 ~A + 1 -> -A
2266 A - (A +- B) -> -+ B
2267 A +- (B +- A) -> +- B
2268 CST +- (CST +- A) -> CST +- A
2269 CST +- (A +- CST) -> CST +- A
2270 A + ~A -> -1
2271
2275 2272 via commuting the addition and contracting operations to zero
2276 2273 by reassociation. */
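/* For example (illustrative), by the (A +- B) - A -> +- B rule
     t_1 = a_2 + b_3;
     t_4 = t_1 - a_2;
   simplifies t_4 to b_3. */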
2274
2275 if (TREE_CODE (rhs1) == SSA_NAME)
2276 {
2277 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2278 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2279 {
2280 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2281 if (def_code == PLUS_EXPR
2282 || def_code == MINUS_EXPR)
2283 {
2284 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2285 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2286 if (operand_equal_p (def_rhs1, rhs2, 0)
2287 && code == MINUS_EXPR)
2288 {
2289 /* (A +- B) - A -> +- B. */
2290 code = ((def_code == PLUS_EXPR)
2291 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2292 rhs1 = def_rhs2;
2293 rhs2 = NULL_TREE;
2294 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2295 gcc_assert (gsi_stmt (*gsi) == stmt);
2296 gimple_set_modified (stmt, true);
2297 }
2298 else if (operand_equal_p (def_rhs2, rhs2, 0)
2299 && code != def_code)
2300 {
2301 /* (A +- B) -+ B -> A. */
2302 code = TREE_CODE (def_rhs1);
2303 rhs1 = def_rhs1;
2304 rhs2 = NULL_TREE;
2305 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2306 gcc_assert (gsi_stmt (*gsi) == stmt);
2307 gimple_set_modified (stmt, true);
2308 }
2309 else if (TREE_CODE (rhs2) == INTEGER_CST
2310 && TREE_CODE (def_rhs1) == INTEGER_CST)
2311 {
2312 /* (CST +- A) +- CST -> CST +- A. */
2313 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2314 def_rhs1, rhs2);
2315 if (cst && !TREE_OVERFLOW (cst))
2316 {
2317 code = def_code;
2318 gimple_assign_set_rhs_code (stmt, code);
2319 rhs1 = cst;
2320 gimple_assign_set_rhs1 (stmt, rhs1);
2321 rhs2 = def_rhs2;
2322 gimple_assign_set_rhs2 (stmt, rhs2);
2323 gimple_set_modified (stmt, true);
2324 }
2325 }
2326 else if (TREE_CODE (rhs2) == INTEGER_CST
2327 && TREE_CODE (def_rhs2) == INTEGER_CST
2328 && def_code == PLUS_EXPR)
2329 {
2330 /* (A + CST) +- CST -> A + CST. */
2331 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2332 def_rhs2, rhs2);
2333 if (cst && !TREE_OVERFLOW (cst))
2334 {
2335 code = PLUS_EXPR;
2336 gimple_assign_set_rhs_code (stmt, code);
2337 rhs1 = def_rhs1;
2338 gimple_assign_set_rhs1 (stmt, rhs1);
2339 rhs2 = cst;
2340 gimple_assign_set_rhs2 (stmt, rhs2);
2341 gimple_set_modified (stmt, true);
2342 }
2343 }
2344 }
2345 else if (def_code == BIT_NOT_EXPR
2346 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2347 {
2348 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2349 if (code == PLUS_EXPR
2350 && operand_equal_p (def_rhs1, rhs2, 0))
2351 {
2352 /* ~A + A -> -1. */
2353 code = INTEGER_CST;
2354 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2355 rhs2 = NULL_TREE;
2356 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2357 gcc_assert (gsi_stmt (*gsi) == stmt);
2358 gimple_set_modified (stmt, true);
2359 }
2360 else if (code == PLUS_EXPR
2364 2361 && integer_onep (rhs2))
2362 {
2363 /* ~A + 1 -> -A. */
2364 code = NEGATE_EXPR;
2365 rhs1 = def_rhs1;
2366 rhs2 = NULL_TREE;
2367 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2368 gcc_assert (gsi_stmt (*gsi) == stmt);
2369 gimple_set_modified (stmt, true);
2370 }
2371 }
2372 }
2373 }
2374
2375 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2376 {
2377 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2378 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2379 {
2380 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2381 if (def_code == PLUS_EXPR
2382 || def_code == MINUS_EXPR)
2383 {
2384 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2385 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2386 if (operand_equal_p (def_rhs1, rhs1, 0)
2387 && code == MINUS_EXPR)
2388 {
2389 /* A - (A +- B) -> -+ B. */
2390 code = ((def_code == PLUS_EXPR)
2391 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2392 rhs1 = def_rhs2;
2393 rhs2 = NULL_TREE;
2394 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2395 gcc_assert (gsi_stmt (*gsi) == stmt);
2396 gimple_set_modified (stmt, true);
2397 }
2398 else if (operand_equal_p (def_rhs2, rhs1, 0)
2399 && code != def_code)
2400 {
2401 /* A +- (B +- A) -> +- B. */
2402 code = ((code == PLUS_EXPR)
2403 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2404 rhs1 = def_rhs1;
2405 rhs2 = NULL_TREE;
2406 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2407 gcc_assert (gsi_stmt (*gsi) == stmt);
2408 gimple_set_modified (stmt, true);
2409 }
2410 else if (TREE_CODE (rhs1) == INTEGER_CST
2411 && TREE_CODE (def_rhs1) == INTEGER_CST)
2412 {
2413 /* CST +- (CST +- A) -> CST +- A. */
2414 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2415 rhs1, def_rhs1);
2416 if (cst && !TREE_OVERFLOW (cst))
2417 {
2418 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2419 gimple_assign_set_rhs_code (stmt, code);
2420 rhs1 = cst;
2421 gimple_assign_set_rhs1 (stmt, rhs1);
2422 rhs2 = def_rhs2;
2423 gimple_assign_set_rhs2 (stmt, rhs2);
2424 gimple_set_modified (stmt, true);
2425 }
2426 }
2427 else if (TREE_CODE (rhs1) == INTEGER_CST
2428 && TREE_CODE (def_rhs2) == INTEGER_CST)
2429 {
2430 /* CST +- (A +- CST) -> CST +- A. */
2431 tree cst = fold_binary (def_code == code
2432 ? PLUS_EXPR : MINUS_EXPR,
2433 TREE_TYPE (rhs2),
2434 rhs1, def_rhs2);
2435 if (cst && !TREE_OVERFLOW (cst))
2436 {
2437 rhs1 = cst;
2438 gimple_assign_set_rhs1 (stmt, rhs1);
2439 rhs2 = def_rhs1;
2440 gimple_assign_set_rhs2 (stmt, rhs2);
2441 gimple_set_modified (stmt, true);
2442 }
2443 }
2444 }
2445 else if (def_code == BIT_NOT_EXPR
2446 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2447 {
2448 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2449 if (code == PLUS_EXPR
2450 && operand_equal_p (def_rhs1, rhs1, 0))
2451 {
2452 /* A + ~A -> -1. */
2453 code = INTEGER_CST;
2454 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2455 rhs2 = NULL_TREE;
2456 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2457 gcc_assert (gsi_stmt (*gsi) == stmt);
2458 gimple_set_modified (stmt, true);
2459 }
2460 }
2461 }
2462 }
2463
2464 out:
2465 if (gimple_modified_p (stmt))
2466 {
2467 fold_stmt_inplace (gsi);
2468 update_stmt (stmt);
2469 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2470 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2471 return true;
2472 }
2473
2474 return false;
2475 }
2476
2480 2477 /* Combine two conversions in a row, for the second conversion at *GSI.
2481 2478 Return 1 if any changes were made, 2 if cfg-cleanup needs to
2482 2479 run, and 0 otherwise. */
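/* For example (an illustrative sketch, with s_1 of type short):
     _2 = (int) s_1;
     _3 = (long) _2;
   the intermediate widening to int is redundant, so _3 becomes
   (long) s_1 directly. */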
2480
2481 static int
2482 combine_conversions (gimple_stmt_iterator *gsi)
2483 {
2484 gimple stmt = gsi_stmt (*gsi);
2485 gimple def_stmt;
2486 tree op0, lhs;
2487 enum tree_code code = gimple_assign_rhs_code (stmt);
2488 enum tree_code code2;
2489
2490 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2491 || code == FLOAT_EXPR
2492 || code == FIX_TRUNC_EXPR);
2493
2494 lhs = gimple_assign_lhs (stmt);
2495 op0 = gimple_assign_rhs1 (stmt);
2496 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2497 {
2498 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2499 return 1;
2500 }
2501
2502 if (TREE_CODE (op0) != SSA_NAME)
2503 return 0;
2504
2505 def_stmt = SSA_NAME_DEF_STMT (op0);
2506 if (!is_gimple_assign (def_stmt))
2507 return 0;
2508
2509 code2 = gimple_assign_rhs_code (def_stmt);
2510
2511 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2512 {
2513 tree defop0 = gimple_assign_rhs1 (def_stmt);
2514 tree type = TREE_TYPE (lhs);
2515 tree inside_type = TREE_TYPE (defop0);
2516 tree inter_type = TREE_TYPE (op0);
2517 int inside_int = INTEGRAL_TYPE_P (inside_type);
2518 int inside_ptr = POINTER_TYPE_P (inside_type);
2519 int inside_float = FLOAT_TYPE_P (inside_type);
2520 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2521 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2522 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2523 int inter_int = INTEGRAL_TYPE_P (inter_type);
2524 int inter_ptr = POINTER_TYPE_P (inter_type);
2525 int inter_float = FLOAT_TYPE_P (inter_type);
2526 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2527 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2528 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2529 int final_int = INTEGRAL_TYPE_P (type);
2530 int final_ptr = POINTER_TYPE_P (type);
2531 int final_float = FLOAT_TYPE_P (type);
2532 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2533 unsigned int final_prec = TYPE_PRECISION (type);
2534 int final_unsignedp = TYPE_UNSIGNED (type);
2535
2536 /* In addition to the cases of two conversions in a row
2537 handled below, if we are converting something to its own
2538 type via an object of identical or wider precision, neither
2539 conversion is needed. */
2540 if (useless_type_conversion_p (type, inside_type)
2541 && (((inter_int || inter_ptr) && final_int)
2542 || (inter_float && final_float))
2543 && inter_prec >= final_prec)
2544 {
2545 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2546 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2547 update_stmt (stmt);
2548 return remove_prop_source_from_use (op0) ? 2 : 1;
2549 }
2550
2551 /* Likewise, if the intermediate and initial types are either both
2552 float or both integer, we don't need the middle conversion if the
2553 former is wider than the latter and doesn't change the signedness
2554 (for integers). Avoid this if the final type is a pointer since
2555 then we sometimes need the middle conversion. Likewise if the
2556 final type has a precision not equal to the size of its mode. */
2557 if (((inter_int && inside_int)
2558 || (inter_float && inside_float)
2559 || (inter_vec && inside_vec))
2560 && inter_prec >= inside_prec
2561 && (inter_float || inter_vec
2562 || inter_unsignedp == inside_unsignedp)
2563 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2564 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2565 && ! final_ptr
2566 && (! final_vec || inter_prec == inside_prec))
2567 {
2568 gimple_assign_set_rhs1 (stmt, defop0);
2569 update_stmt (stmt);
2570 return remove_prop_source_from_use (op0) ? 2 : 1;
2571 }
2572
2573 /* If we have a sign-extension of a zero-extended value, we can
2574 replace that by a single zero-extension. Likewise if the
2575 final conversion does not change precision we can drop the
2576 intermediate conversion. */
2577 if (inside_int && inter_int && final_int
2578 && ((inside_prec < inter_prec && inter_prec < final_prec
2579 && inside_unsignedp && !inter_unsignedp)
2580 || final_prec == inter_prec))
2581 {
2582 gimple_assign_set_rhs1 (stmt, defop0);
2583 update_stmt (stmt);
2584 return remove_prop_source_from_use (op0) ? 2 : 1;
2585 }
2586
2587 /* Two conversions in a row are not needed unless:
2588 - some conversion is floating-point (overstrict for now), or
2589 - some conversion is a vector (overstrict for now), or
2590 - the intermediate type is narrower than both initial and
2591 final, or
2592 - the intermediate type and innermost type differ in signedness,
2593 and the outermost type is wider than the intermediate, or
2594 - the initial type is a pointer type and the precisions of the
2595 intermediate and final types differ, or
2596 - the final type is a pointer type and the precisions of the
2597 initial and intermediate types differ. */
2598 if (! inside_float && ! inter_float && ! final_float
2599 && ! inside_vec && ! inter_vec && ! final_vec
2600 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2601 && ! (inside_int && inter_int
2602 && inter_unsignedp != inside_unsignedp
2603 && inter_prec < final_prec)
2604 && ((inter_unsignedp && inter_prec > inside_prec)
2605 == (final_unsignedp && final_prec > inter_prec))
2606 && ! (inside_ptr && inter_prec != final_prec)
2607 && ! (final_ptr && inside_prec != inter_prec)
2608 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2609 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2610 {
2611 gimple_assign_set_rhs1 (stmt, defop0);
2612 update_stmt (stmt);
2613 return remove_prop_source_from_use (op0) ? 2 : 1;
2614 }
2615
2619 2616 /* A truncation to an unsigned type should be canonicalized as a
2620 2617 bitwise AND with a mask. */
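/* For example (illustrative), with x_1 of type int:
     _2 = (unsigned short) x_1;
     _3 = (int) _2;
   _3 becomes x_1 & 0xffff. */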
2618 if (final_int && inter_int && inside_int
2619 && final_prec == inside_prec
2620 && final_prec > inter_prec
2621 && inter_unsignedp)
2622 {
2623 tree tem;
2624 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2625 defop0,
2626 double_int_to_tree
2627 (inside_type, double_int_mask (inter_prec)));
2628 if (!useless_type_conversion_p (type, inside_type))
2629 {
2630 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2631 GSI_SAME_STMT);
2632 gimple_assign_set_rhs1 (stmt, tem);
2633 }
2634 else
2635 gimple_assign_set_rhs_from_tree (gsi, tem);
2636 update_stmt (gsi_stmt (*gsi));
2637 return 1;
2638 }
2639
2643 2640 /* If we are converting an integer to a floating-point type that can
2644 2641 represent it exactly and back to an integer, we can skip the
2645 2642 floating-point conversion. */
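/* For example (illustrative), with i_1 of type int:
     _2 = (double) i_1;
     _3 = (int) _2;
   _3 becomes i_1, since the 53-bit significand of a double
   represents every 32-bit int exactly. */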
2643 if (inside_int && inter_float && final_int &&
2644 (unsigned) significand_size (TYPE_MODE (inter_type))
2645 >= inside_prec - !inside_unsignedp)
2646 {
2647 if (useless_type_conversion_p (type, inside_type))
2648 {
2649 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2650 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2651 update_stmt (stmt);
2652 return remove_prop_source_from_use (op0) ? 2 : 1;
2653 }
2654 else
2655 {
2656 gimple_assign_set_rhs1 (stmt, defop0);
2657 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2658 update_stmt (stmt);
2659 return remove_prop_source_from_use (op0) ? 2 : 1;
2660 }
2661 }
2662 }
2663
2664 return 0;
2665 }
2666
2667 /* Main entry point for the forward propagation and statement combine
2668 optimizer. */
2669
2670 static unsigned int
2671 ssa_forward_propagate_and_combine (void)
2672 {
2673 basic_block bb;
2674 unsigned int todoflags = 0;
2675
2676 cfg_changed = false;
2677
2678 FOR_EACH_BB (bb)
2679 {
2680 gimple_stmt_iterator gsi, prev;
2681 bool prev_initialized;
2682
2683 /* Apply forward propagation to all stmts in the basic-block.
2684 Note we update GSI within the loop as necessary. */
2685 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2686 {
2687 gimple stmt = gsi_stmt (gsi);
2688 tree lhs, rhs;
2689 enum tree_code code;
2690
2691 if (!is_gimple_assign (stmt))
2692 {
2693 gsi_next (&gsi);
2694 continue;
2695 }
2696
2697 lhs = gimple_assign_lhs (stmt);
2698 rhs = gimple_assign_rhs1 (stmt);
2699 code = gimple_assign_rhs_code (stmt);
2700 if (TREE_CODE (lhs) != SSA_NAME
2701 || has_zero_uses (lhs))
2702 {
2703 gsi_next (&gsi);
2704 continue;
2705 }
2706
2707 /* If this statement sets an SSA_NAME to an address,
2708 try to propagate the address into the uses of the SSA_NAME. */
2709 if (code == ADDR_EXPR
2710 /* Handle pointer conversions on invariant addresses
2711 as well, as this is valid gimple. */
2712 || (CONVERT_EXPR_CODE_P (code)
2713 && TREE_CODE (rhs) == ADDR_EXPR
2714 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2715 {
2716 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2717 if ((!base
2718 || !DECL_P (base)
2719 || decl_address_invariant_p (base))
2720 && !stmt_references_abnormal_ssa_name (stmt)
2721 && forward_propagate_addr_expr (lhs, rhs))
2722 {
2723 release_defs (stmt);
2724 todoflags |= TODO_remove_unused_locals;
2725 gsi_remove (&gsi, true);
2726 }
2727 else
2728 gsi_next (&gsi);
2729 }
2730 else if (code == POINTER_PLUS_EXPR)
2731 {
2732 tree off = gimple_assign_rhs2 (stmt);
2733 if (TREE_CODE (off) == INTEGER_CST
2734 && can_propagate_from (stmt)
2735 && !simple_iv_increment_p (stmt)
2736 /* ??? Better adjust the interface to that function
2737 instead of building new trees here. */
2738 && forward_propagate_addr_expr
2739 (lhs,
2740 build1_loc (gimple_location (stmt),
2741 ADDR_EXPR, TREE_TYPE (rhs),
2742 fold_build2 (MEM_REF,
2743 TREE_TYPE (TREE_TYPE (rhs)),
2744 rhs,
2745 fold_convert (ptr_type_node,
2746 off)))))
2747 {
2748 release_defs (stmt);
2749 todoflags |= TODO_remove_unused_locals;
2750 gsi_remove (&gsi, true);
2751 }
2752 else if (is_gimple_min_invariant (rhs))
2753 {
2754 /* Make sure to fold &a[0] + off_1 here. */
2755 fold_stmt_inplace (&gsi);
2756 update_stmt (stmt);
2757 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2758 gsi_next (&gsi);
2759 }
2760 else
2761 gsi_next (&gsi);
2762 }
2763 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2764 {
2765 if (forward_propagate_comparison (&gsi))
2766 cfg_changed = true;
2767 }
2768 else
2769 gsi_next (&gsi);
2770 }
2771
2772 /* Combine stmts with the stmts defining their operands.
2773 Note we update GSI within the loop as necessary. */
2774 prev_initialized = false;
2775 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2776 {
2777 gimple stmt = gsi_stmt (gsi);
2778 bool changed = false;
2779
2780 switch (gimple_code (stmt))
2781 {
2782 case GIMPLE_ASSIGN:
2783 {
2784 tree rhs1 = gimple_assign_rhs1 (stmt);
2785 enum tree_code code = gimple_assign_rhs_code (stmt);
2786
2787 if ((code == BIT_NOT_EXPR
2788 || code == NEGATE_EXPR)
2789 && TREE_CODE (rhs1) == SSA_NAME)
2790 changed = simplify_not_neg_expr (&gsi);
2791 else if (code == COND_EXPR
2792 || code == VEC_COND_EXPR)
2793 {
2794 /* In this case the entire COND_EXPR is in rhs1. */
2795 if (forward_propagate_into_cond (&gsi)
2796 || combine_cond_exprs (&gsi))
2797 {
2798 changed = true;
2799 stmt = gsi_stmt (gsi);
2800 }
2801 }
2802 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2803 {
2804 int did_something;
2805 did_something = forward_propagate_into_comparison (&gsi);
2806 if (did_something == 2)
2807 cfg_changed = true;
2808 changed = did_something != 0;
2809 }
2810 else if (code == BIT_AND_EXPR
2811 || code == BIT_IOR_EXPR
2812 || code == BIT_XOR_EXPR)
2813 changed = simplify_bitwise_binary (&gsi);
2814 else if (code == PLUS_EXPR
2815 || code == MINUS_EXPR)
2816 changed = associate_plusminus (&gsi);
2817 else if (CONVERT_EXPR_CODE_P (code)
2818 || code == FLOAT_EXPR
2819 || code == FIX_TRUNC_EXPR)
2820 {
2821 int did_something = combine_conversions (&gsi);
2822 if (did_something == 2)
2823 cfg_changed = true;
2824 changed = did_something != 0;
2825 }
2826 break;
2827 }
2828
2829 case GIMPLE_SWITCH:
2830 changed = simplify_gimple_switch (stmt);
2831 break;
2832
2833 case GIMPLE_COND:
2834 {
2835 int did_something;
2836 did_something = forward_propagate_into_gimple_cond (stmt);
2837 if (did_something == 2)
2838 cfg_changed = true;
2839 changed = did_something != 0;
2840 break;
2841 }
2842
2843 case GIMPLE_CALL:
2844 {
2845 tree callee = gimple_call_fndecl (stmt);
2846 if (callee != NULL_TREE
2847 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2848 changed = simplify_builtin_call (&gsi, callee);
2849 break;
2850 }
2851
2852 default:;
2853 }
2854
2855 if (changed)
2856 {
2860 2857 /* If the stmt changed, then re-visit it and the statements
2861 2858 inserted before it. */
2859 if (!prev_initialized)
2860 gsi = gsi_start_bb (bb);
2861 else
2862 {
2863 gsi = prev;
2864 gsi_next (&gsi);
2865 }
2866 }
2867 else
2868 {
2869 prev = gsi;
2870 prev_initialized = true;
2871 gsi_next (&gsi);
2872 }
2873 }
2874 }
2875
2876 if (cfg_changed)
2877 todoflags |= TODO_cleanup_cfg;
2878
2879 return todoflags;
2880 }
2881
2882
2883 static bool
2884 gate_forwprop (void)
2885 {
2886 return flag_tree_forwprop;
2887 }
2888
2889 struct gimple_opt_pass pass_forwprop =
2890 {
2891 {
2892 GIMPLE_PASS,
2893 "forwprop", /* name */
2894 gate_forwprop, /* gate */
2895 ssa_forward_propagate_and_combine, /* execute */
2896 NULL, /* sub */
2897 NULL, /* next */
2898 0, /* static_pass_number */
2899 TV_TREE_FORWPROP, /* tv_id */
2900 PROP_cfg | PROP_ssa, /* properties_required */
2901 0, /* properties_provided */
2902 0, /* properties_destroyed */
2903 0, /* todo_flags_start */
2904 TODO_ggc_collect
2905 | TODO_update_ssa
2906 | TODO_verify_ssa /* todo_flags_finish */
2907 }
2908 };