1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "tm_p.h"
33 #include "gimple-pretty-print.h"
34 #include "internal-fn.h"
35 #include "gimple-fold.h"
36 #include "tree-eh.h"
37 #include "gimplify.h"
38 #include "gimple-iterator.h"
39 #include "gimplify-me.h"
40 #include "tree-cfg.h"
41 #include "flags.h"
42 #include "insn-config.h"
43 #include "expmed.h"
44 #include "dojump.h"
45 #include "explow.h"
46 #include "calls.h"
47 #include "emit-rtl.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-pass.h"
53 #include "langhooks.h"
54 #include "diagnostic.h"
55 #include "cfgloop.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "tree-ssa-propagate.h"
59 #include "tree-ssa-dom.h"
60 #include "builtins.h"
61 #include "tree-cfgcleanup.h"
62 #include "tree-into-ssa.h"
63 #include "cfganal.h"
64
65 /* This pass propagates the RHS of assignment statements into use
66 sites of the LHS of the assignment. It's basically a specialized
67 form of tree combination. It is hoped all of this can disappear
68 when we have a generalized tree combiner.
69
70 One class of common cases we handle is forward propagating a single use
71 variable into a COND_EXPR.
72
73 bb0:
74 x = a COND b;
75 if (x) goto ... else goto ...
76
77 Will be transformed into:
78
79 bb0:
80 if (a COND b) goto ... else goto ...
81
82 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
83
84 Or (assuming c1 and c2 are constants):
85
86 bb0:
87 x = a + c1;
88 if (x EQ/NEQ c2) goto ... else goto ...
89
90 Will be transformed into:
91
92 bb0:
93 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
94
95 Similarly for x = a - c1.
96
97 Or
98
99 bb0:
100 x = !a
101 if (x) goto ... else goto ...
102
103 Will be transformed into:
104
105 bb0:
106 if (a == 0) goto ... else goto ...
107
108 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
109 For these cases, we propagate A into all, possibly more than one,
110 COND_EXPRs that use X.
111
112 Or
113
114 bb0:
115 x = (typecast) a
116 if (x) goto ... else goto ...
117
118 Will be transformed into:
119
120 bb0:
121 if (a != 0) goto ... else goto ...
122
123        (Assuming a is of integral type and x of boolean type, or x of
124        integral type and a of boolean type.)
125
126 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
127 For these cases, we propagate A into all, possibly more than one,
128 COND_EXPRs that use X.
129
130 In addition to eliminating the variable and the statement which assigns
131 a value to the variable, we may be able to later thread the jump without
132 adding insane complexity in the dominator optimizer.
133
134 Also note these transformations can cascade. We handle this by having
135 a worklist of COND_EXPR statements to examine. As we make a change to
136 a statement, we put it back on the worklist to examine on the next
137 iteration of the main loop.
138
139 A second class of propagation opportunities arises for ADDR_EXPR
140 nodes.
141
142 ptr = &x->y->z;
143 res = *ptr;
144
145 Will get turned into
146
147 res = x->y->z;
148
149 Or
150 ptr = (type1*)&type2var;
151 res = *ptr
152
153 Will get turned into (if type1 and type2 are the same size
154      and neither has volatile on it):
155 res = VIEW_CONVERT_EXPR<type1>(type2var)
156
157 Or
158
159 ptr = &x[0];
160 ptr2 = ptr + <constant>;
161
162 Will get turned into
163
164 ptr2 = &x[constant/elementsize];
165
166 Or
167
168 ptr = &x[0];
169 offset = index * element_size;
170 offset_p = (pointer) offset;
171 ptr2 = ptr + offset_p
172
173 Will get turned into:
174
175 ptr2 = &x[index];
176
177 Or
178 ssa = (int) decl
179 res = ssa & 1
180
181 Provided that decl has known alignment >= 2, will get turned into
182
183 res = 0
184
185 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
186    allow us to remove the cast, and {NOT_EXPR,NEG_EXPR} into a subsequent
187 {NOT_EXPR,NEG_EXPR}.
188
189 This will (of course) be extended as other needs arise. */
190
191 static bool forward_propagate_addr_expr (tree, tree, bool);
192
193 /* Set to true if we delete dead edges during the optimization. */
194 static bool cfg_changed;
195
196 static tree rhs_to_tree (tree type, gimple stmt);
197
198 static bitmap to_purge;  /* Blocks whose dead EH edges may need purging.  */
199
200 /* Const-and-copy lattice. */
201 static vec<tree> lattice;
202
203 /* Set the lattice entry for NAME to VAL. */
204 static void
205 fwprop_set_lattice_val (tree name, tree val)
206 {
207 if (TREE_CODE (name) == SSA_NAME)
208 {
209 if (SSA_NAME_VERSION (name) >= lattice.length ())
210 {
211 lattice.reserve (num_ssa_names - lattice.length ());
212 lattice.quick_grow_cleared (num_ssa_names);
213 }
214 lattice[SSA_NAME_VERSION (name)] = val;
215 }
216 }
217
218 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
219 static void
220 fwprop_invalidate_lattice (tree name)
221 {
222 if (name
223 && TREE_CODE (name) == SSA_NAME
224 && SSA_NAME_VERSION (name) < lattice.length ())
225 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
226 }
227
228
229 /* Get the statement we can propagate from into NAME, skipping
230    trivial copies.  Returns the statement which defines the
231    propagation source or NULL if there is no such statement.
232    If SINGLE_USE_ONLY is set, consider only sources which have
233 a single use chain up to NAME. If SINGLE_USE_P is non-null,
234 it is set to whether the chain to NAME is a single use chain
235 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
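/* For example (a sketch; the SSA names below are made up), given the chain

     a_1 = x_4 + y_5;
     b_2 = a_1;
     c_3 = b_2;

   asking for the propagation source of c_3 walks the trivial copies
   and returns the statement a_1 = x_4 + y_5.  */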
236
237 static gimple
238 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
239 {
240 bool single_use = true;
241
242 do {
243 gimple def_stmt = SSA_NAME_DEF_STMT (name);
244
245 if (!has_single_use (name))
246 {
247 single_use = false;
248 if (single_use_only)
249 return NULL;
250 }
251
252 /* If name is defined by a PHI node or is the default def, bail out. */
253 if (!is_gimple_assign (def_stmt))
254 return NULL;
255
256 /* If def_stmt is a simple copy, continue looking. */
257 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
258 name = gimple_assign_rhs1 (def_stmt);
259 else
260 {
261 if (!single_use_only && single_use_p)
262 *single_use_p = single_use;
263
264 return def_stmt;
265 }
266 } while (1);
267 }
268
269 /* Checks if the destination ssa name in DEF_STMT can be used as
270    a propagation source.  Returns true if so, otherwise false.  */
271
272 static bool
273 can_propagate_from (gimple def_stmt)
274 {
275 gcc_assert (is_gimple_assign (def_stmt));
276
277 /* If the rhs has side-effects we cannot propagate from it. */
278 if (gimple_has_volatile_ops (def_stmt))
279 return false;
280
281 /* If the rhs is a load we cannot propagate from it. */
282 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
283 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
284 return false;
285
286   /* Constants can always be propagated.  */
287 if (gimple_assign_single_p (def_stmt)
288 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
289 return true;
290
291 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
292 if (stmt_references_abnormal_ssa_name (def_stmt))
293 return false;
294
295 /* If the definition is a conversion of a pointer to a function type,
296      then we cannot apply optimizations as some targets require
297 function pointers to be canonicalized and in this case this
298 optimization could eliminate a necessary canonicalization. */
299 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
300 {
301 tree rhs = gimple_assign_rhs1 (def_stmt);
302 if (POINTER_TYPE_P (TREE_TYPE (rhs))
303 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
304 return false;
305 }
306
307 return true;
308 }
309
310 /* Remove a chain of dead statements starting at the definition of
311 NAME. The chain is linked via the first operand of the defining statements.
312 If NAME was replaced in its only use then this function can be used
313 to clean up dead stmts. The function handles already released SSA
314 names gracefully.
315 Returns true if cleanup-cfg has to run. */
316
317 static bool
318 remove_prop_source_from_use (tree name)
319 {
320 gimple_stmt_iterator gsi;
321 gimple stmt;
322 bool cfg_changed = false;
323
324 do {
325 basic_block bb;
326
327 if (SSA_NAME_IN_FREE_LIST (name)
328 || SSA_NAME_IS_DEFAULT_DEF (name)
329 || !has_zero_uses (name))
330 return cfg_changed;
331
332 stmt = SSA_NAME_DEF_STMT (name);
333 if (gimple_code (stmt) == GIMPLE_PHI
334 || gimple_has_side_effects (stmt))
335 return cfg_changed;
336
337 bb = gimple_bb (stmt);
338 gsi = gsi_for_stmt (stmt);
339 unlink_stmt_vdef (stmt);
340 if (gsi_remove (&gsi, true))
341 bitmap_set_bit (to_purge, bb->index);
342 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
343 release_defs (stmt);
344
345 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
346 } while (name && TREE_CODE (name) == SSA_NAME);
347
348 return cfg_changed;
349 }
350
351 /* Return the rhs of a gassign *STMT in the form of a single tree,
352 converted to type TYPE.
353
354 This should disappear, but is needed so we can combine expressions and use
355 the fold() interfaces. Long term, we need to develop folding and combine
356    routines that deal with gimple exclusively.  */
357
358 static tree
359 rhs_to_tree (tree type, gimple stmt)
360 {
361 location_t loc = gimple_location (stmt);
362 enum tree_code code = gimple_assign_rhs_code (stmt);
363 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
364 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
365 gimple_assign_rhs2 (stmt),
366 gimple_assign_rhs3 (stmt));
367 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
368 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
369 gimple_assign_rhs2 (stmt));
370 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
371 return build1 (code, type, gimple_assign_rhs1 (stmt));
372 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
373 return gimple_assign_rhs1 (stmt);
374 else
375 gcc_unreachable ();
376 }
377
378 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
379 the folded result in a form suitable for COND_EXPR_COND or
380    NULL_TREE if there is no suitable simplified form.  If
381    INVARIANT_ONLY is true, only gimple_min_invariant results are
382 considered simplified. */
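/* As an illustration (a sketch with made-up operands): for
   CODE == EQ_EXPR, OP0 == a_1 + 1 and OP1 == 1, fold_binary_loc can
   simplify the combined condition to a_1 == 0, which is a valid
   COND_EXPR_COND.  */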
383
384 static tree
385 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
386 tree op0, tree op1, bool invariant_only)
387 {
388 tree t;
389
390 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
391
392 fold_defer_overflow_warnings ();
393 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
394 if (!t)
395 {
396 fold_undefer_overflow_warnings (false, NULL, 0);
397 return NULL_TREE;
398 }
399
400 /* Require that we got a boolean type out if we put one in. */
401 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
402
403 /* Canonicalize the combined condition for use in a COND_EXPR. */
404 t = canonicalize_cond_expr_cond (t);
405
406 /* Bail out if we required an invariant but didn't get one. */
407 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
408 {
409 fold_undefer_overflow_warnings (false, NULL, 0);
410 return NULL_TREE;
411 }
412
413 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
414
415 return t;
416 }
417
418 /* Combine the comparison OP0 CODE OP1 with the defining statements
419    of its operands.  Return a new comparison tree or NULL_TREE if there
420 were no simplifying combines. */
421
422 static tree
423 forward_propagate_into_comparison_1 (gimple stmt,
424 enum tree_code code, tree type,
425 tree op0, tree op1)
426 {
427 tree tmp = NULL_TREE;
428 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
429 bool single_use0_p = false, single_use1_p = false;
430
431   /* For comparisons use the first operand; that is likely to
432 simplify comparisons against constants. */
433 if (TREE_CODE (op0) == SSA_NAME)
434 {
435 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
436 if (def_stmt && can_propagate_from (def_stmt))
437 {
438 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
439 bool invariant_only_p = !single_use0_p;
440
441 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
442
443 /* Always combine comparisons or conversions from booleans. */
444 if (TREE_CODE (op1) == INTEGER_CST
445 && ((CONVERT_EXPR_CODE_P (def_code)
446 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
447 == BOOLEAN_TYPE)
448 || TREE_CODE_CLASS (def_code) == tcc_comparison))
449 invariant_only_p = false;
450
451 tmp = combine_cond_expr_cond (stmt, code, type,
452 rhs0, op1, invariant_only_p);
453 if (tmp)
454 return tmp;
455 }
456 }
457
458 /* If that wasn't successful, try the second operand. */
459 if (TREE_CODE (op1) == SSA_NAME)
460 {
461 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
462 if (def_stmt && can_propagate_from (def_stmt))
463 {
464 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
465 tmp = combine_cond_expr_cond (stmt, code, type,
466 op0, rhs1, !single_use1_p);
467 if (tmp)
468 return tmp;
469 }
470 }
471
472 /* If that wasn't successful either, try both operands. */
473 if (rhs0 != NULL_TREE
474 && rhs1 != NULL_TREE)
475 tmp = combine_cond_expr_cond (stmt, code, type,
476 rhs0, rhs1,
477 !(single_use0_p && single_use1_p));
478
479 return tmp;
480 }
481
482 /* Propagate from the ssa name definition statements into the
483    comparison assignment at *GSI, if that simplifies it.
484 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
485 otherwise returns 0. */
486
487 static int
488 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
489 {
490 gimple stmt = gsi_stmt (*gsi);
491 tree tmp;
492 bool cfg_changed = false;
493 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
494 tree rhs1 = gimple_assign_rhs1 (stmt);
495 tree rhs2 = gimple_assign_rhs2 (stmt);
496
497 /* Combine the comparison with defining statements. */
498 tmp = forward_propagate_into_comparison_1 (stmt,
499 gimple_assign_rhs_code (stmt),
500 type, rhs1, rhs2);
501 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
502 {
503 gimple_assign_set_rhs_from_tree (gsi, tmp);
504 fold_stmt (gsi);
505 update_stmt (gsi_stmt (*gsi));
506
507 if (TREE_CODE (rhs1) == SSA_NAME)
508 cfg_changed |= remove_prop_source_from_use (rhs1);
509 if (TREE_CODE (rhs2) == SSA_NAME)
510 cfg_changed |= remove_prop_source_from_use (rhs2);
511 return cfg_changed ? 2 : 1;
512 }
513
514 return 0;
515 }
516
517 /* Propagate from the ssa name definition statements of COND_EXPR
518 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
519 Returns zero if no statement was changed, one if there were
520 changes and two if cfg_cleanup needs to run.
521
522 This must be kept in sync with forward_propagate_into_cond. */
523
524 static int
525 forward_propagate_into_gimple_cond (gcond *stmt)
526 {
527 tree tmp;
528 enum tree_code code = gimple_cond_code (stmt);
529 bool cfg_changed = false;
530 tree rhs1 = gimple_cond_lhs (stmt);
531 tree rhs2 = gimple_cond_rhs (stmt);
532
533 /* We can do tree combining on SSA_NAME and comparison expressions. */
534 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
535 return 0;
536
537 tmp = forward_propagate_into_comparison_1 (stmt, code,
538 boolean_type_node,
539 rhs1, rhs2);
540 if (tmp)
541 {
542       if (dump_file)
543 {
544 fprintf (dump_file, " Replaced '");
545 print_gimple_expr (dump_file, stmt, 0, 0);
546 fprintf (dump_file, "' with '");
547 print_generic_expr (dump_file, tmp, 0);
548 fprintf (dump_file, "'\n");
549 }
550
551 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
552 update_stmt (stmt);
553
554 if (TREE_CODE (rhs1) == SSA_NAME)
555 cfg_changed |= remove_prop_source_from_use (rhs1);
556 if (TREE_CODE (rhs2) == SSA_NAME)
557 cfg_changed |= remove_prop_source_from_use (rhs2);
558 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
559 }
560
561 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
562 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
563 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
564 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
565 && ((code == EQ_EXPR
566 && integer_zerop (rhs2))
567 || (code == NE_EXPR
568 && integer_onep (rhs2))))
569 {
570 basic_block bb = gimple_bb (stmt);
571 gimple_cond_set_code (stmt, NE_EXPR);
572 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
573 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
574 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
575 return 1;
576 }
577
578 return 0;
579 }
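/* An example of the edge-swapping canonicalization above (a sketch):

     if (b_1 == 0) goto bb2; else goto bb3;

   becomes

     if (b_1 != 0) goto bb3; else goto bb2;

   flipping the EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags instead of
   modifying the CFG.  */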
580
581
582 /* Propagate from the ssa name definition statements of COND_EXPR
583 in the rhs of statement STMT into the conditional if that simplifies it.
584    Returns true if the stmt was changed.  */
585
586 static bool
587 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
588 {
589 gimple stmt = gsi_stmt (*gsi_p);
590 tree tmp = NULL_TREE;
591 tree cond = gimple_assign_rhs1 (stmt);
592 enum tree_code code = gimple_assign_rhs_code (stmt);
593
594 /* We can do tree combining on SSA_NAME and comparison expressions. */
595 if (COMPARISON_CLASS_P (cond))
596 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
597 TREE_TYPE (cond),
598 TREE_OPERAND (cond, 0),
599 TREE_OPERAND (cond, 1));
600 else if (TREE_CODE (cond) == SSA_NAME)
601 {
602 enum tree_code def_code;
603 tree name = cond;
604 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
605 if (!def_stmt || !can_propagate_from (def_stmt))
606         return false;
607
608 def_code = gimple_assign_rhs_code (def_stmt);
609 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
610 tmp = fold_build2_loc (gimple_location (def_stmt),
611 def_code,
612 TREE_TYPE (cond),
613 gimple_assign_rhs1 (def_stmt),
614 gimple_assign_rhs2 (def_stmt));
615 }
616
617 if (tmp
618 && is_gimple_condexpr (tmp))
619 {
620       if (dump_file)
621 {
622 fprintf (dump_file, " Replaced '");
623 print_generic_expr (dump_file, cond, 0);
624 fprintf (dump_file, "' with '");
625 print_generic_expr (dump_file, tmp, 0);
626 fprintf (dump_file, "'\n");
627 }
628
629 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
630 : integer_onep (tmp))
631 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
632 else if (integer_zerop (tmp))
633 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
634 else
635 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
636 stmt = gsi_stmt (*gsi_p);
637 update_stmt (stmt);
638
639 return true;
640 }
641
642   return false;
643 }
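/* A sketch of the transform above, with made-up SSA names and a
   single-use condition c_1:

     c_1 = a_2 < b_3;
     x_4 = c_1 ? y_5 : z_6;

   becomes

     x_4 = a_2 < b_3 ? y_5 : z_6;

   and when the combined condition folds to a constant, the COND_EXPR
   is replaced by the selected arm directly.  */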
644
645 /* We've just substituted an ADDR_EXPR into stmt. Update all the
646 relevant data structures to match. */
647
648 static void
649 tidy_after_forward_propagate_addr (gimple stmt)
650 {
651 /* We may have turned a trapping insn into a non-trapping insn. */
652 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
653 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
654
655 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
656 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
657 }
658
659 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
660 ADDR_EXPR <whatever>.
661
662 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
663 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
664 node or for recovery of array indexing from pointer arithmetic.
665
666    Return true if the propagation was successful (even a partially
667    successful propagation may still have changed things).  */
668
669 static bool
670 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
671 gimple_stmt_iterator *use_stmt_gsi,
672 bool single_use_p)
673 {
674 tree lhs, rhs, rhs2, array_ref;
675 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
676 enum tree_code rhs_code;
677 bool res = true;
678
679 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
680
681 lhs = gimple_assign_lhs (use_stmt);
682 rhs_code = gimple_assign_rhs_code (use_stmt);
683 rhs = gimple_assign_rhs1 (use_stmt);
684
685 /* Do not perform copy-propagation but recurse through copy chains. */
686 if (TREE_CODE (lhs) == SSA_NAME
687 && rhs_code == SSA_NAME)
688 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
689
690   /* The use statement could be a conversion.  Recurse to the uses of the
691      lhs, as copyprop does not copy through pointer-to-integer-to-pointer
692      conversions and FRE does not catch all cases either.
693      Treat the case of a single-use name and
694      a conversion to def_rhs type separately, though.  */
695 if (TREE_CODE (lhs) == SSA_NAME
696 && CONVERT_EXPR_CODE_P (rhs_code))
697 {
698 /* If there is a point in a conversion chain where the types match
699 so we can remove a conversion re-materialize the address here
700 and stop. */
701 if (single_use_p
702 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
703 {
704 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
705 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
706 return true;
707 }
708
709 /* Else recurse if the conversion preserves the address value. */
710 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
711 || POINTER_TYPE_P (TREE_TYPE (lhs)))
712 && (TYPE_PRECISION (TREE_TYPE (lhs))
713 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
714 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
715
716 return false;
717 }
718
719   /* If this isn't a conversion chain, from here on we can only
720      propagate into compatible pointer contexts.  */
721 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
722 return false;
723
724 /* Propagate through constant pointer adjustments. */
725 if (TREE_CODE (lhs) == SSA_NAME
726 && rhs_code == POINTER_PLUS_EXPR
727 && rhs == name
728 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
729 {
730 tree new_def_rhs;
731 /* As we come here with non-invariant addresses in def_rhs we need
732 to make sure we can build a valid constant offsetted address
733 for further propagation. Simply rely on fold building that
734 and check after the fact. */
735 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
736 def_rhs,
737 fold_convert (ptr_type_node,
738 gimple_assign_rhs2 (use_stmt)));
739 if (TREE_CODE (new_def_rhs) == MEM_REF
740 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
741 return false;
742 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
743 TREE_TYPE (rhs));
744
745 /* Recurse. If we could propagate into all uses of lhs do not
746 bother to replace into the current use but just pretend we did. */
747 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
748 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
749 return true;
750
751 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
752 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
753 new_def_rhs);
754 else if (is_gimple_min_invariant (new_def_rhs))
755 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
756 else
757 return false;
758 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
759 update_stmt (use_stmt);
760 return true;
761 }
762
763 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
764 ADDR_EXPR will not appear on the LHS. */
765 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
766 while (handled_component_p (*lhsp))
767 lhsp = &TREE_OPERAND (*lhsp, 0);
768 lhs = *lhsp;
769
770 /* Now see if the LHS node is a MEM_REF using NAME. If so,
771 propagate the ADDR_EXPR into the use of NAME and fold the result. */
772 if (TREE_CODE (lhs) == MEM_REF
773 && TREE_OPERAND (lhs, 0) == name)
774 {
775 tree def_rhs_base;
776 HOST_WIDE_INT def_rhs_offset;
777 /* If the address is invariant we can always fold it. */
778 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
779 &def_rhs_offset)))
780 {
781 offset_int off = mem_ref_offset (lhs);
782 tree new_ptr;
783 off += def_rhs_offset;
784 if (TREE_CODE (def_rhs_base) == MEM_REF)
785 {
786 off += mem_ref_offset (def_rhs_base);
787 new_ptr = TREE_OPERAND (def_rhs_base, 0);
788 }
789 else
790 new_ptr = build_fold_addr_expr (def_rhs_base);
791 TREE_OPERAND (lhs, 0) = new_ptr;
792 TREE_OPERAND (lhs, 1)
793 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
794 tidy_after_forward_propagate_addr (use_stmt);
795 /* Continue propagating into the RHS if this was not the only use. */
796 if (single_use_p)
797 return true;
798 }
799 /* If the LHS is a plain dereference and the value type is the same as
800 that of the pointed-to type of the address we can put the
801 dereferenced address on the LHS preserving the original alias-type. */
802 else if (integer_zerop (TREE_OPERAND (lhs, 1))
803 && ((gimple_assign_lhs (use_stmt) == lhs
804 && useless_type_conversion_p
805 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
806 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
807 || types_compatible_p (TREE_TYPE (lhs),
808 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
809 /* Don't forward anything into clobber stmts if it would result
810 in the lhs no longer being a MEM_REF. */
811 && (!gimple_clobber_p (use_stmt)
812 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
813 {
814 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
815 tree new_offset, new_base, saved, new_lhs;
816 while (handled_component_p (*def_rhs_basep))
817 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
818 saved = *def_rhs_basep;
819 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
820 {
821 new_base = TREE_OPERAND (*def_rhs_basep, 0);
822 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
823 TREE_OPERAND (*def_rhs_basep, 1));
824 }
825 else
826 {
827 new_base = build_fold_addr_expr (*def_rhs_basep);
828 new_offset = TREE_OPERAND (lhs, 1);
829 }
830 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
831 new_base, new_offset);
832 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
833 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
834 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
835 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
836 *lhsp = new_lhs;
837 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
838 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
839 *def_rhs_basep = saved;
840 tidy_after_forward_propagate_addr (use_stmt);
841 /* Continue propagating into the RHS if this was not the
842 only use. */
843 if (single_use_p)
844 return true;
845 }
846 else
847 /* We can have a struct assignment dereferencing our name twice.
848 Note that we didn't propagate into the lhs to not falsely
849 claim we did when propagating into the rhs. */
850 res = false;
851 }
852
853 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
854 nodes from the RHS. */
855 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
856 if (TREE_CODE (*rhsp) == ADDR_EXPR)
857 rhsp = &TREE_OPERAND (*rhsp, 0);
858 while (handled_component_p (*rhsp))
859 rhsp = &TREE_OPERAND (*rhsp, 0);
860 rhs = *rhsp;
861
862 /* Now see if the RHS node is a MEM_REF using NAME. If so,
863 propagate the ADDR_EXPR into the use of NAME and fold the result. */
864 if (TREE_CODE (rhs) == MEM_REF
865 && TREE_OPERAND (rhs, 0) == name)
866 {
867 tree def_rhs_base;
868 HOST_WIDE_INT def_rhs_offset;
869 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
870 &def_rhs_offset)))
871 {
872 offset_int off = mem_ref_offset (rhs);
873 tree new_ptr;
874 off += def_rhs_offset;
875 if (TREE_CODE (def_rhs_base) == MEM_REF)
876 {
877 off += mem_ref_offset (def_rhs_base);
878 new_ptr = TREE_OPERAND (def_rhs_base, 0);
879 }
880 else
881 new_ptr = build_fold_addr_expr (def_rhs_base);
882 TREE_OPERAND (rhs, 0) = new_ptr;
883 TREE_OPERAND (rhs, 1)
884 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
885 fold_stmt_inplace (use_stmt_gsi);
886 tidy_after_forward_propagate_addr (use_stmt);
887 return res;
888 }
889 /* If the RHS is a plain dereference and the value type is the same as
890 that of the pointed-to type of the address we can put the
891 dereferenced address on the RHS preserving the original alias-type. */
892 else if (integer_zerop (TREE_OPERAND (rhs, 1))
893 && ((gimple_assign_rhs1 (use_stmt) == rhs
894 && useless_type_conversion_p
895 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
896 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
897 || types_compatible_p (TREE_TYPE (rhs),
898 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
899 {
900 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
901 tree new_offset, new_base, saved, new_rhs;
902 while (handled_component_p (*def_rhs_basep))
903 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
904 saved = *def_rhs_basep;
905 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
906 {
907 new_base = TREE_OPERAND (*def_rhs_basep, 0);
908 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
909 TREE_OPERAND (*def_rhs_basep, 1));
910 }
911 else
912 {
913 new_base = build_fold_addr_expr (*def_rhs_basep);
914 new_offset = TREE_OPERAND (rhs, 1);
915 }
916 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
917 new_base, new_offset);
918 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
919 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
920 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
921 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
922 *rhsp = new_rhs;
923 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
924 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
925 *def_rhs_basep = saved;
926 fold_stmt_inplace (use_stmt_gsi);
927 tidy_after_forward_propagate_addr (use_stmt);
928 return res;
929 }
930 }
931
932 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
933 is nothing to do. */
934 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
935 || gimple_assign_rhs1 (use_stmt) != name)
936 return false;
937
938 /* The remaining cases are all for turning pointer arithmetic into
939 array indexing. They only apply when we have the address of
940 element zero in an array. If that is not the case then there
941 is nothing to do. */
942 array_ref = TREE_OPERAND (def_rhs, 0);
943 if ((TREE_CODE (array_ref) != ARRAY_REF
944 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
945 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
946 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
947 return false;
948
949 rhs2 = gimple_assign_rhs2 (use_stmt);
950 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
951 if (TREE_CODE (rhs2) == INTEGER_CST)
952 {
953 tree new_rhs = build1_loc (gimple_location (use_stmt),
954 ADDR_EXPR, TREE_TYPE (def_rhs),
955 fold_build2 (MEM_REF,
956 TREE_TYPE (TREE_TYPE (def_rhs)),
957 unshare_expr (def_rhs),
958 fold_convert (ptr_type_node,
959 rhs2)));
960 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
961 use_stmt = gsi_stmt (*use_stmt_gsi);
962 update_stmt (use_stmt);
963 tidy_after_forward_propagate_addr (use_stmt);
964 return true;
965 }
966
967 return false;
968 }
969
970 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
971
972 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
973 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
974 node or for recovery of array indexing from pointer arithmetic.
975
976 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
977 the single use in the previous invocation. Pass true when calling
978    this at the toplevel.
979
980    Returns true if all uses have been propagated into.  */
981
982 static bool
983 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
984 {
985 imm_use_iterator iter;
986 gimple use_stmt;
987 bool all = true;
988 bool single_use_p = parent_single_use_p && has_single_use (name);
989
990 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
991 {
992 bool result;
993 tree use_rhs;
994
995 /* If the use is not in a simple assignment statement, then
996 there is nothing we can do. */
997 if (!is_gimple_assign (use_stmt))
998 {
999 if (!is_gimple_debug (use_stmt))
1000 all = false;
1001 continue;
1002 }
1003
1004 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1005 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1006 single_use_p);
1007 /* If the use has moved to a different statement adjust
1008 the update machinery for the old statement too. */
1009 if (use_stmt != gsi_stmt (gsi))
1010 {
1011 update_stmt (use_stmt);
1012 use_stmt = gsi_stmt (gsi);
1013 }
1014 update_stmt (use_stmt);
1015 all &= result;
1016
1017 /* Remove intermediate now unused copy and conversion chains. */
1018 use_rhs = gimple_assign_rhs1 (use_stmt);
1019 if (result
1020 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1021 && TREE_CODE (use_rhs) == SSA_NAME
1022 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1023 {
1024 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1025 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
1026 release_defs (use_stmt);
1027 gsi_remove (&gsi, true);
1028 }
1029 }
1030
1031 return all && has_zero_uses (name);
1032 }
1033
1034
1035 /* Helper function for simplify_gimple_switch. Remove case labels that
1036 have values outside the range of the new type. */
1037
1038 static void
1039 simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
1040 {
1041 unsigned int branch_num = gimple_switch_num_labels (stmt);
1042 auto_vec<tree> labels (branch_num);
1043 unsigned int i, len;
1044
1045 /* Collect the existing case labels in a VEC, and preprocess it as if
1046 we are gimplifying a GENERIC SWITCH_EXPR. */
1047 for (i = 1; i < branch_num; i++)
1048 labels.quick_push (gimple_switch_label (stmt, i));
1049 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1050
1051 /* If any labels were removed, replace the existing case labels
1052 in the GIMPLE_SWITCH statement with the correct ones.
1053 Note that the type updates were done in-place on the case labels,
1054 so we only have to replace the case labels in the GIMPLE_SWITCH
1055 if the number of labels changed. */
1056 len = labels.length ();
1057 if (len < branch_num - 1)
1058 {
1059 bitmap target_blocks;
1060 edge_iterator ei;
1061 edge e;
1062
1063 /* Corner case: *all* case labels have been removed as being
1064 out-of-range for INDEX_TYPE. Push one label and let the
1065 CFG cleanups deal with this further. */
1066 if (len == 0)
1067 {
1068 tree label, elt;
1069
1070 label = CASE_LABEL (gimple_switch_default_label (stmt));
1071 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1072 labels.quick_push (elt);
1073 len = 1;
1074 }
1075
1076 for (i = 0; i < labels.length (); i++)
1077 gimple_switch_set_label (stmt, i + 1, labels[i]);
1078 for (i++ ; i < branch_num; i++)
1079 gimple_switch_set_label (stmt, i, NULL_TREE);
1080 gimple_switch_set_num_labels (stmt, len + 1);
1081
1082 /* Cleanup any edges that are now dead. */
1083 target_blocks = BITMAP_ALLOC (NULL);
1084 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1085 {
1086 tree elt = gimple_switch_label (stmt, i);
1087 basic_block target = label_to_block (CASE_LABEL (elt));
1088 bitmap_set_bit (target_blocks, target->index);
1089 }
1090 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1091 {
1092 if (! bitmap_bit_p (target_blocks, e->dest->index))
1093 {
1094 remove_edge (e);
1095 cfg_changed = true;
1096 free_dominance_info (CDI_DOMINATORS);
1097 }
1098 else
1099 ei_next (&ei);
1100 }
1101 BITMAP_FREE (target_blocks);
1102 }
1103 }
1104
1105 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1106 the condition which we may be able to optimize better. */
1107
1108 static bool
1109 simplify_gimple_switch (gswitch *stmt)
1110 {
1111 /* The optimization that we really care about is removing unnecessary
1112 casts. That will let us do much better in propagating the inferred
1113 constant at the switch target. */
1114 tree cond = gimple_switch_index (stmt);
1115 if (TREE_CODE (cond) == SSA_NAME)
1116 {
1117 gimple def_stmt = SSA_NAME_DEF_STMT (cond);
1118 if (gimple_assign_cast_p (def_stmt))
1119 {
1120 tree def = gimple_assign_rhs1 (def_stmt);
1121 if (TREE_CODE (def) != SSA_NAME)
1122 return false;
1123
1124 /* If we have an extension or sign-change that preserves the
1125 values we check against then we can copy the source value into
1126 the switch. */
1127 tree ti = TREE_TYPE (def);
1128 if (INTEGRAL_TYPE_P (ti)
1129 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1130 {
1131 size_t n = gimple_switch_num_labels (stmt);
1132 tree min = NULL_TREE, max = NULL_TREE;
1133 if (n > 1)
1134 {
1135 min = CASE_LOW (gimple_switch_label (stmt, 1));
1136 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1137 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1138 else
1139 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1140 }
1141 if ((!min || int_fits_type_p (min, ti))
1142 && (!max || int_fits_type_p (max, ti)))
1143 {
1144 gimple_switch_set_index (stmt, def);
1145 simplify_gimple_switch_label_vec (stmt, ti);
1146 update_stmt (stmt);
1147 return true;
1148 }
1149 }
1150 }
1151 }
1152
1153 return false;
1154 }
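/* For instance (a sketch), given

     unsigned char c_1 = ...;
     int i_2 = (int) c_1;
     switch (i_2) { case 3: ... case 250: ... }

   both case values fit in the narrower type, so the cast is bypassed:

     switch (c_1) { case 3: ... case 250: ... }  */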
1155
1156 /* For pointers p2 and p1 return p2 - p1 if the
1157    difference is known and constant, otherwise return NULL_TREE.  */
1158
1159 static tree
1160 constant_pointer_difference (tree p1, tree p2)
1161 {
1162 int i, j;
1163 #define CPD_ITERATIONS 5
1164 tree exps[2][CPD_ITERATIONS];
1165 tree offs[2][CPD_ITERATIONS];
1166 int cnt[2];
1167
1168 for (i = 0; i < 2; i++)
1169 {
1170 tree p = i ? p1 : p2;
1171 tree off = size_zero_node;
1172 gimple stmt;
1173 enum tree_code code;
1174
1175 /* For each of p1 and p2 we need to iterate at least
1176 twice, to handle ADDR_EXPR directly in p1/p2,
1177 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1178 on definition's stmt RHS. Iterate a few extra times. */
1179 j = 0;
1180 do
1181 {
1182 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1183 break;
1184 if (TREE_CODE (p) == ADDR_EXPR)
1185 {
1186 tree q = TREE_OPERAND (p, 0);
1187 HOST_WIDE_INT offset;
1188 tree base = get_addr_base_and_unit_offset (q, &offset);
1189 if (base)
1190 {
1191 q = base;
1192 if (offset)
1193 off = size_binop (PLUS_EXPR, off, size_int (offset));
1194 }
1195 if (TREE_CODE (q) == MEM_REF
1196 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1197 {
1198 p = TREE_OPERAND (q, 0);
1199 off = size_binop (PLUS_EXPR, off,
1200 wide_int_to_tree (sizetype,
1201 mem_ref_offset (q)));
1202 }
1203 else
1204 {
1205 exps[i][j] = q;
1206 offs[i][j++] = off;
1207 break;
1208 }
1209 }
1210 if (TREE_CODE (p) != SSA_NAME)
1211 break;
1212 exps[i][j] = p;
1213 offs[i][j++] = off;
1214 if (j == CPD_ITERATIONS)
1215 break;
1216 stmt = SSA_NAME_DEF_STMT (p);
1217 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1218 break;
1219 code = gimple_assign_rhs_code (stmt);
1220 if (code == POINTER_PLUS_EXPR)
1221 {
1222 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1223 break;
1224 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1225 p = gimple_assign_rhs1 (stmt);
1226 }
1227 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1228 p = gimple_assign_rhs1 (stmt);
1229 else
1230 break;
1231 }
1232 while (1);
1233 cnt[i] = j;
1234 }
1235
1236 for (i = 0; i < cnt[0]; i++)
1237 for (j = 0; j < cnt[1]; j++)
1238 if (exps[0][i] == exps[1][j])
1239 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1240
1241 return NULL_TREE;
1242 }
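/* E.g. (a sketch with made-up names) for P1 == &a[1] and P2 == q_2,
   where q_2 = &a[5] was assigned elsewhere, the walks above meet at
   the common base a and the result is the sizetype constant
   4 * sizeof (a[0]); if no common base is found within CPD_ITERATIONS
   steps, NULL_TREE is returned.  */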
1243
1244 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1245 Optimize
1246 memcpy (p, "abcd", 4);
1247 memset (p + 4, ' ', 3);
1248 into
1249    memcpy (p, "abcd   ", 7);
1250    if the latter call can be stored by pieces during expansion.  */
1251
1252 static bool
1253 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1254 {
1255 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1256 tree vuse = gimple_vuse (stmt2);
1257 if (vuse == NULL)
1258 return false;
1259 stmt1 = SSA_NAME_DEF_STMT (vuse);
1260
1261 switch (DECL_FUNCTION_CODE (callee2))
1262 {
1263 case BUILT_IN_MEMSET:
1264 if (gimple_call_num_args (stmt2) != 3
1265 || gimple_call_lhs (stmt2)
1266 || CHAR_BIT != 8
1267 || BITS_PER_UNIT != 8)
1268 break;
1269 else
1270 {
1271 tree callee1;
1272 tree ptr1, src1, str1, off1, len1, lhs1;
1273 tree ptr2 = gimple_call_arg (stmt2, 0);
1274 tree val2 = gimple_call_arg (stmt2, 1);
1275 tree len2 = gimple_call_arg (stmt2, 2);
1276 tree diff, vdef, new_str_cst;
1277 gimple use_stmt;
1278 unsigned int ptr1_align;
1279 unsigned HOST_WIDE_INT src_len;
1280 char *src_buf;
1281 use_operand_p use_p;
1282
1283 if (!tree_fits_shwi_p (val2)
1284 || !tree_fits_uhwi_p (len2)
1285 || compare_tree_int (len2, 1024) == 1)
1286 break;
1287 if (is_gimple_call (stmt1))
1288 {
1289 /* If first stmt is a call, it needs to be memcpy
1290 or mempcpy, with string literal as second argument and
1291 constant length. */
1292 callee1 = gimple_call_fndecl (stmt1);
1293 if (callee1 == NULL_TREE
1294 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1295 || gimple_call_num_args (stmt1) != 3)
1296 break;
1297 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1298 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1299 break;
1300 ptr1 = gimple_call_arg (stmt1, 0);
1301 src1 = gimple_call_arg (stmt1, 1);
1302 len1 = gimple_call_arg (stmt1, 2);
1303 lhs1 = gimple_call_lhs (stmt1);
1304 if (!tree_fits_uhwi_p (len1))
1305 break;
1306 str1 = string_constant (src1, &off1);
1307 if (str1 == NULL_TREE)
1308 break;
1309 if (!tree_fits_uhwi_p (off1)
1310 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1311 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1312 - tree_to_uhwi (off1)) > 0
1313 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1314 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1315 != TYPE_MODE (char_type_node))
1316 break;
1317 }
1318 else if (gimple_assign_single_p (stmt1))
1319 {
1320 /* Otherwise look for length 1 memcpy optimized into
1321 assignment. */
1322 ptr1 = gimple_assign_lhs (stmt1);
1323 src1 = gimple_assign_rhs1 (stmt1);
1324 if (TREE_CODE (ptr1) != MEM_REF
1325 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1326 || !tree_fits_shwi_p (src1))
1327 break;
1328 ptr1 = build_fold_addr_expr (ptr1);
1329 callee1 = NULL_TREE;
1330 len1 = size_one_node;
1331 lhs1 = NULL_TREE;
1332 off1 = size_zero_node;
1333 str1 = NULL_TREE;
1334 }
1335 else
1336 break;
1337
1338 diff = constant_pointer_difference (ptr1, ptr2);
1339 if (diff == NULL && lhs1 != NULL)
1340 {
1341 diff = constant_pointer_difference (lhs1, ptr2);
1342 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1343 && diff != NULL)
1344 diff = size_binop (PLUS_EXPR, diff,
1345 fold_convert (sizetype, len1));
1346 }
1347 /* If the difference between the second and first destination pointer
1348 is not constant, or is bigger than memcpy length, bail out. */
1349 if (diff == NULL
1350 || !tree_fits_uhwi_p (diff)
1351 || tree_int_cst_lt (len1, diff)
1352 || compare_tree_int (diff, 1024) == 1)
1353 break;
1354
1355 /* Use maximum of difference plus memset length and memcpy length
1356          as the new memcpy length; if it is too big, bail out.  */
1357 src_len = tree_to_uhwi (diff);
1358 src_len += tree_to_uhwi (len2);
1359 if (src_len < tree_to_uhwi (len1))
1360 src_len = tree_to_uhwi (len1);
1361 if (src_len > 1024)
1362 break;
1363
1364 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1365          with a bigger length will return a different result.  */
1366 if (lhs1 != NULL_TREE
1367 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1368 && (TREE_CODE (lhs1) != SSA_NAME
1369 || !single_imm_use (lhs1, &use_p, &use_stmt)
1370 || use_stmt != stmt2))
1371 break;
1372
1373 /* If anything reads memory in between memcpy and memset
1374 call, the modified memcpy call might change it. */
1375 vdef = gimple_vdef (stmt1);
1376 if (vdef != NULL
1377 && (!single_imm_use (vdef, &use_p, &use_stmt)
1378 || use_stmt != stmt2))
1379 break;
1380
1381 ptr1_align = get_pointer_alignment (ptr1);
1382 /* Construct the new source string literal. */
1383 src_buf = XALLOCAVEC (char, src_len + 1);
1384 if (callee1)
1385 memcpy (src_buf,
1386 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1387 tree_to_uhwi (len1));
1388 else
1389 src_buf[0] = tree_to_shwi (src1);
1390 memset (src_buf + tree_to_uhwi (diff),
1391 tree_to_shwi (val2), tree_to_uhwi (len2));
1392 src_buf[src_len] = '\0';
1393 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1394          handles embedded '\0's.  */
1395 if (strlen (src_buf) != src_len)
1396 break;
1397 rtl_profile_for_bb (gimple_bb (stmt2));
1398 /* If the new memcpy wouldn't be emitted by storing the literal
1399 by pieces, this optimization might enlarge .rodata too much,
1400 as commonly used string literals couldn't be shared any
1401 longer. */
1402 if (!can_store_by_pieces (src_len,
1403 builtin_strncpy_read_str,
1404 src_buf, ptr1_align, false))
1405 break;
1406
1407 new_str_cst = build_string_literal (src_len, src_buf);
1408 if (callee1)
1409 {
1410 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1411 memset call. */
1412 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1413 gimple_call_set_lhs (stmt1, NULL_TREE);
1414 gimple_call_set_arg (stmt1, 1, new_str_cst);
1415 gimple_call_set_arg (stmt1, 2,
1416 build_int_cst (TREE_TYPE (len1), src_len));
1417 update_stmt (stmt1);
1418 unlink_stmt_vdef (stmt2);
1419 gsi_remove (gsi_p, true);
1420 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1421 release_defs (stmt2);
1422 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1423 {
1424 fwprop_invalidate_lattice (lhs1);
1425 release_ssa_name (lhs1);
1426 }
1427 return true;
1428 }
1429 else
1430 {
1431 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1432 assignment, remove STMT1 and change memset call into
1433 memcpy call. */
1434 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1435
1436 if (!is_gimple_val (ptr1))
1437 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1438 true, GSI_SAME_STMT);
1439 gimple_call_set_fndecl (stmt2,
1440 builtin_decl_explicit (BUILT_IN_MEMCPY));
1441 gimple_call_set_arg (stmt2, 0, ptr1);
1442 gimple_call_set_arg (stmt2, 1, new_str_cst);
1443 gimple_call_set_arg (stmt2, 2,
1444 build_int_cst (TREE_TYPE (len2), src_len));
1445 unlink_stmt_vdef (stmt1);
1446 gsi_remove (&gsi, true);
1447 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1448 release_defs (stmt1);
1449 update_stmt (stmt2);
1450 return false;
1451 }
1452 }
1453 break;
1454 default:
1455 break;
1456 }
1457 return false;
1458 }
1459
1460 /* Given an ssa_name NAME, see if it was defined by an assignment; if so,
1461    set CODE to the code of the rhs, ARG1 to its first operand and ARG2
1462    to its second operand.  */
1463
1464 static inline void
1465 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1466 {
1467 gimple def;
1468 enum tree_code code1;
1469 tree arg11;
1470 tree arg21;
1471 tree arg31;
1472 enum gimple_rhs_class grhs_class;
1473
1474 code1 = TREE_CODE (name);
1475 arg11 = name;
1476 arg21 = NULL_TREE;
1477 grhs_class = get_gimple_rhs_class (code1);
1478
1479 if (code1 == SSA_NAME)
1480 {
1481 def = SSA_NAME_DEF_STMT (name);
1482
1483 if (def && is_gimple_assign (def)
1484 && can_propagate_from (def))
1485 {
1486 code1 = gimple_assign_rhs_code (def);
1487 arg11 = gimple_assign_rhs1 (def);
1488 arg21 = gimple_assign_rhs2 (def);
1489          arg31 = gimple_assign_rhs3 (def);
1490 }
1491 }
1492   else if (grhs_class == GIMPLE_TERNARY_RHS
1493            || grhs_class == GIMPLE_BINARY_RHS
1494            || grhs_class == GIMPLE_UNARY_RHS
1495            || grhs_class == GIMPLE_SINGLE_RHS)
1496 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1497
1498 *code = code1;
1499 *arg1 = arg11;
1500 if (arg2)
1501 *arg2 = arg21;
1502 /* Ignore arg3 currently. */
1503 }
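/* E.g. (a sketch) if NAME is x_1 and x_1 = y_2 << 3, this sets *CODE
   to LSHIFT_EXPR, *ARG1 to y_2 and *ARG2 to 3.  */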
1504
1505
1506 /* Recognize rotation patterns. Return true if a transformation
1507 applied, otherwise return false.
1508
1509 We are looking for X with unsigned type T with bitsize B, OP being
1510 +, | or ^, some type T2 wider than T and
1511 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1512 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1513 (X << Y) OP (X >> (B - Y))
1514 (X << (int) Y) OP (X >> (int) (B - Y))
1515 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1516 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1517 (X << Y) | (X >> ((-Y) & (B - 1)))
1518 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1519 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1520 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1521
1522 and transform these into:
1523 X r<< CNT1
1524 X r<< Y
1525
1526 Note, in the patterns with T2 type, the type of OP operands
1527 might be even a signed type, but should have precision B. */
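/* A concrete instance of the patterns above, for 32-bit unsigned x
   (so B == 32):

     (x << 3) | (x >> 29)            ->   x r<< 3
     (x << y) | (x >> ((-y) & 31))   ->   x r<< y  */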
1528
1529 static bool
1530 simplify_rotate (gimple_stmt_iterator *gsi)
1531 {
1532 gimple stmt = gsi_stmt (*gsi);
1533 tree arg[2], rtype, rotcnt = NULL_TREE;
1534 tree def_arg1[2], def_arg2[2];
1535 enum tree_code def_code[2];
1536 tree lhs;
1537 int i;
1538 bool swapped_p = false;
1539 gimple g;
1540
1541 arg[0] = gimple_assign_rhs1 (stmt);
1542 arg[1] = gimple_assign_rhs2 (stmt);
1543 rtype = TREE_TYPE (arg[0]);
1544
1545 /* Only create rotates in complete modes. Other cases are not
1546 expanded properly. */
1547 if (!INTEGRAL_TYPE_P (rtype)
1548 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
1549 return false;
1550
1551 for (i = 0; i < 2; i++)
1552 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1553
1554 /* Look through narrowing conversions. */
1555 if (CONVERT_EXPR_CODE_P (def_code[0])
1556 && CONVERT_EXPR_CODE_P (def_code[1])
1557 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1558 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1559 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1560 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1561 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1562 && has_single_use (arg[0])
1563 && has_single_use (arg[1]))
1564 {
1565 for (i = 0; i < 2; i++)
1566 {
1567 arg[i] = def_arg1[i];
1568 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1569 }
1570 }
1571
1572 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1573 for (i = 0; i < 2; i++)
1574 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1575 return false;
1576 else if (!has_single_use (arg[i]))
1577 return false;
1578 if (def_code[0] == def_code[1])
1579 return false;
1580
1581 /* If we've looked through narrowing conversions before, look through
1582 widening conversions from unsigned type with the same precision
1583 as rtype here. */
1584 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1585 for (i = 0; i < 2; i++)
1586 {
1587 tree tem;
1588 enum tree_code code;
1589 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1590 if (!CONVERT_EXPR_CODE_P (code)
1591 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1592 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1593 return false;
1594 def_arg1[i] = tem;
1595 }
1596 /* Both shifts have to use the same first operand. */
1597 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
1598 return false;
1599 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1600 return false;
1601
1602 /* CNT1 + CNT2 == B case above. */
1603 if (tree_fits_uhwi_p (def_arg2[0])
1604 && tree_fits_uhwi_p (def_arg2[1])
1605 && tree_to_uhwi (def_arg2[0])
1606 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1607 rotcnt = def_arg2[0];
1608 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1609 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1610 return false;
1611 else
1612 {
1613 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1614 enum tree_code cdef_code[2];
1615 /* Look through conversion of the shift count argument.
1616          The C/C++ FEs cast any shift count argument to integer_type_node.
1617          The only problem might be if the shift count type maximum value
1618          is equal to or smaller than the number of bits in rtype.  */
1619 for (i = 0; i < 2; i++)
1620 {
1621 def_arg2_alt[i] = def_arg2[i];
1622 defcodefor_name (def_arg2[i], &cdef_code[i],
1623 &cdef_arg1[i], &cdef_arg2[i]);
1624 if (CONVERT_EXPR_CODE_P (cdef_code[i])
1625 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1626 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1627 > floor_log2 (TYPE_PRECISION (rtype))
1628 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1629 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
1630 {
1631 def_arg2_alt[i] = cdef_arg1[i];
1632 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1633 &cdef_arg1[i], &cdef_arg2[i]);
1634 }
1635 }
1636 for (i = 0; i < 2; i++)
1637 /* Check for one shift count being Y and the other B - Y,
1638 with optional casts. */
1639 if (cdef_code[i] == MINUS_EXPR
1640 && tree_fits_shwi_p (cdef_arg1[i])
1641 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1642 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1643 {
1644 tree tem;
1645 enum tree_code code;
1646
1647 if (cdef_arg2[i] == def_arg2[1 - i]
1648 || cdef_arg2[i] == def_arg2_alt[1 - i])
1649 {
1650 rotcnt = cdef_arg2[i];
1651 break;
1652 }
1653 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1654 if (CONVERT_EXPR_CODE_P (code)
1655 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1656 && TYPE_PRECISION (TREE_TYPE (tem))
1657 > floor_log2 (TYPE_PRECISION (rtype))
1658 && TYPE_PRECISION (TREE_TYPE (tem))
1659 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1660 && (tem == def_arg2[1 - i]
1661 || tem == def_arg2_alt[1 - i]))
1662 {
1663 rotcnt = tem;
1664 break;
1665 }
1666 }
1667 /* The above sequence isn't safe for Y being 0,
1668 because then one of the shifts triggers undefined behavior.
1669 This alternative is safe even for rotation count of 0.
1670 One shift count is Y and the other (-Y) & (B - 1). */
1671 else if (cdef_code[i] == BIT_AND_EXPR
1672 && tree_fits_shwi_p (cdef_arg2[i])
1673 && tree_to_shwi (cdef_arg2[i])
1674 == TYPE_PRECISION (rtype) - 1
1675 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1676 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1677 {
1678 tree tem;
1679 enum tree_code code;
1680
1681 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1682 if (CONVERT_EXPR_CODE_P (code)
1683 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1684 && TYPE_PRECISION (TREE_TYPE (tem))
1685 > floor_log2 (TYPE_PRECISION (rtype))
1686 && TYPE_PRECISION (TREE_TYPE (tem))
1687 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
1688 defcodefor_name (tem, &code, &tem, NULL);
1689
1690 if (code == NEGATE_EXPR)
1691 {
1692 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1693 {
1694 rotcnt = tem;
1695 break;
1696 }
1697 defcodefor_name (tem, &code, &tem, NULL);
1698 if (CONVERT_EXPR_CODE_P (code)
1699 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1700 && TYPE_PRECISION (TREE_TYPE (tem))
1701 > floor_log2 (TYPE_PRECISION (rtype))
1702 && TYPE_PRECISION (TREE_TYPE (tem))
1703 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1704 && (tem == def_arg2[1 - i]
1705 || tem == def_arg2_alt[1 - i]))
1706 {
1707 rotcnt = tem;
1708 break;
1709 }
1710 }
1711 }
1712 if (rotcnt == NULL_TREE)
1713 return false;
1714 swapped_p = i != 1;
1715 }
1716
1717 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1718 TREE_TYPE (rotcnt)))
1719 {
1720 g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
1721 NOP_EXPR, rotcnt);
1722 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1723 rotcnt = gimple_assign_lhs (g);
1724 }
1725 lhs = gimple_assign_lhs (stmt);
1726 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1727 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
1728 g = gimple_build_assign (lhs,
1729 ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1730 ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
1731 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1732 {
1733 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1734 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
1735 }
1736 gsi_replace (gsi, g, false);
1737 return true;
1738 }
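
/* As an illustrative sketch (not part of the pass), the source-level
   idioms that simplify_rotate matches look like the following
   hypothetical functions, for B == 32:

     unsigned int
     rotl (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> (32 - y));    // undefined when y == 0
     }

     unsigned int
     rotl_safe (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> (-y & 31));   // well defined for y == 0
     }

   Both are rewritten into a single LROTATE_EXPR (x r<< y); the second
   form is matched by the (-Y) & (B - 1) case above.  */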
1739
1740 /* Combine an element access with a shuffle or a constructor.  Returns
1741    true if any changes were made, false otherwise.  */
1742
1743 static bool
1744 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1745 {
1746 gimple stmt = gsi_stmt (*gsi);
1747 gimple def_stmt;
1748 tree op, op0, op1, op2;
1749 tree elem_type;
1750 unsigned idx, n, size;
1751 enum tree_code code;
1752
1753 op = gimple_assign_rhs1 (stmt);
1754 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1755
1756 op0 = TREE_OPERAND (op, 0);
1757 if (TREE_CODE (op0) != SSA_NAME
1758 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1759 return false;
1760
1761 def_stmt = get_prop_source_stmt (op0, false, NULL);
1762 if (!def_stmt || !can_propagate_from (def_stmt))
1763 return false;
1764
1765 op1 = TREE_OPERAND (op, 1);
1766 op2 = TREE_OPERAND (op, 2);
1767 code = gimple_assign_rhs_code (def_stmt);
1768
1769 if (code == CONSTRUCTOR)
1770 {
1771 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1772 gimple_assign_rhs1 (def_stmt), op1, op2);
1773 if (!tem || !valid_gimple_rhs_p (tem))
1774 return false;
1775 gimple_assign_set_rhs_from_tree (gsi, tem);
1776 update_stmt (gsi_stmt (*gsi));
1777 return true;
1778 }
1779
1780 elem_type = TREE_TYPE (TREE_TYPE (op0));
1781 if (TREE_TYPE (op) != elem_type)
1782 return false;
1783
1784 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1785 n = TREE_INT_CST_LOW (op1) / size;
1786 if (n != 1)
1787 return false;
1788 idx = TREE_INT_CST_LOW (op2) / size;
1789
1790 if (code == VEC_PERM_EXPR)
1791 {
1792 tree p, m, index, tem;
1793 unsigned nelts;
1794 m = gimple_assign_rhs3 (def_stmt);
1795 if (TREE_CODE (m) != VECTOR_CST)
1796 return false;
1797 nelts = VECTOR_CST_NELTS (m);
1798 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1799 idx %= 2 * nelts;
1800 if (idx < nelts)
1801 {
1802 p = gimple_assign_rhs1 (def_stmt);
1803 }
1804 else
1805 {
1806 p = gimple_assign_rhs2 (def_stmt);
1807 idx -= nelts;
1808 }
1809 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
1810 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1811 unshare_expr (p), op1, index);
1812 gimple_assign_set_rhs1 (stmt, tem);
1813 fold_stmt (gsi);
1814 update_stmt (gsi_stmt (*gsi));
1815 return true;
1816 }
1817
1818 return false;
1819 }
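
/* For example (a hypothetical GIMPLE sketch with V4SI vectors and
   illustrative SSA names), simplify_bitfield_ref turns

     v_3 = VEC_PERM_EXPR <a_1, b_2, { 2, 7, 0, 5 }>;
     x_4 = BIT_FIELD_REF <v_3, 32, 32>;

   into

     x_4 = BIT_FIELD_REF <b_2, 32, 96>;

   because element 1 of the mask is 7, which selects element
   7 - 4 == 3 of the second input.  */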
1820
1821 /* Determine whether applying the two permutations (MASK1 then MASK2)
1822    gives back one of the inputs.  */
1823
1824 static int
1825 is_combined_permutation_identity (tree mask1, tree mask2)
1826 {
1827 tree mask;
1828 unsigned int nelts, i, j;
1829 bool maybe_identity1 = true;
1830 bool maybe_identity2 = true;
1831
1832 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1833 && TREE_CODE (mask2) == VECTOR_CST);
1834 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1835 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
1836
1837 nelts = VECTOR_CST_NELTS (mask);
1838 for (i = 0; i < nelts; i++)
1839 {
1840 tree val = VECTOR_CST_ELT (mask, i);
1841 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1842 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1843 if (j == i)
1844 maybe_identity2 = false;
1845 else if (j == i + nelts)
1846 maybe_identity1 = false;
1847 else
1848 return 0;
1849 }
1850 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1851 }
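
/* For instance (illustrative values, nelts == 4): with
   mask1 = { 1, 0, 3, 2 } and mask2 = { 1, 0, 3, 2 }, the composed
   mask element is mask1[mask2[i]] == i for every i, so the combined
   permutation is the identity on the first input and 1 is returned.  */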
1852
1853 /* Combine a shuffle with its arguments.  Returns 1 if any changes were
1854    made, 2 if cfg-cleanup needs to run as well, and 0 otherwise.  */
1855
1856 static int
1857 simplify_permutation (gimple_stmt_iterator *gsi)
1858 {
1859 gimple stmt = gsi_stmt (*gsi);
1860 gimple def_stmt;
1861 tree op0, op1, op2, op3, arg0, arg1;
1862 enum tree_code code;
1863 bool single_use_op0 = false;
1864
1865 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1866
1867 op0 = gimple_assign_rhs1 (stmt);
1868 op1 = gimple_assign_rhs2 (stmt);
1869 op2 = gimple_assign_rhs3 (stmt);
1870
1871 if (TREE_CODE (op2) != VECTOR_CST)
1872 return 0;
1873
1874 if (TREE_CODE (op0) == VECTOR_CST)
1875 {
1876 code = VECTOR_CST;
1877 arg0 = op0;
1878 }
1879 else if (TREE_CODE (op0) == SSA_NAME)
1880 {
1881 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1882 if (!def_stmt || !can_propagate_from (def_stmt))
1883 return 0;
1884
1885 code = gimple_assign_rhs_code (def_stmt);
1886 arg0 = gimple_assign_rhs1 (def_stmt);
1887 }
1888 else
1889 return 0;
1890
1891 /* Two consecutive shuffles. */
1892 if (code == VEC_PERM_EXPR)
1893 {
1894 tree orig;
1895 int ident;
1896
1897 if (op0 != op1)
1898 return 0;
1899 op3 = gimple_assign_rhs3 (def_stmt);
1900 if (TREE_CODE (op3) != VECTOR_CST)
1901 return 0;
1902 ident = is_combined_permutation_identity (op3, op2);
1903 if (!ident)
1904 return 0;
1905 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1906 : gimple_assign_rhs2 (def_stmt);
1907 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1908 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1909 gimple_set_num_ops (stmt, 2);
1910 update_stmt (stmt);
1911 return remove_prop_source_from_use (op0) ? 2 : 1;
1912 }
1913
1914 /* Shuffle of a constructor. */
1915 else if (code == CONSTRUCTOR || code == VECTOR_CST)
1916 {
1917 tree opt;
1918 bool ret = false;
1919 if (op0 != op1)
1920 {
1921 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1922 return 0;
1923
1924 if (TREE_CODE (op1) == VECTOR_CST)
1925 arg1 = op1;
1926 else if (TREE_CODE (op1) == SSA_NAME)
1927 {
1928 enum tree_code code2;
1929
1930 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1931 if (!def_stmt2 || !can_propagate_from (def_stmt2))
1932 return 0;
1933
1934 code2 = gimple_assign_rhs_code (def_stmt2);
1935 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1936 return 0;
1937 arg1 = gimple_assign_rhs1 (def_stmt2);
1938 }
1939 else
1940 return 0;
1941 }
1942 else
1943 {
1944 /* Already used twice in this statement. */
1945 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1946 return 0;
1947 arg1 = arg0;
1948 }
1949 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1950 if (!opt
1951 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1952 return 0;
1953 gimple_assign_set_rhs_from_tree (gsi, opt);
1954 update_stmt (gsi_stmt (*gsi));
1955 if (TREE_CODE (op0) == SSA_NAME)
1956 ret = remove_prop_source_from_use (op0);
1957 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1958 ret |= remove_prop_source_from_use (op1);
1959 return ret ? 2 : 1;
1960 }
1961
1962 return 0;
1963 }
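
/* Continuing the example above (hypothetical SSA names), the
   two-shuffle case collapses

     t_2 = VEC_PERM_EXPR <a_1, a_1, { 1, 0, 3, 2 }>;
     r_3 = VEC_PERM_EXPR <t_2, t_2, { 1, 0, 3, 2 }>;

   into plain  r_3 = a_1;  since the combined permutation is the
   identity on the first input (case 1 above).  */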
1964
1965 /* Recognize a CONSTRUCTOR of BIT_FIELD_REFs as a VEC_PERM_EXPR.
1966    Returns true if any changes were made.  */

1967 static bool
1968 simplify_vector_constructor (gimple_stmt_iterator *gsi)
1969 {
1970 gimple stmt = gsi_stmt (*gsi);
1971 gimple def_stmt;
1972 tree op, op2, orig, type, elem_type;
1973 unsigned elem_size, nelts, i;
1974 enum tree_code code;
1975 constructor_elt *elt;
1976 unsigned char *sel;
1977 bool maybe_ident;
1978
1979 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
1980
1981 op = gimple_assign_rhs1 (stmt);
1982 type = TREE_TYPE (op);
1983 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
1984
1985 nelts = TYPE_VECTOR_SUBPARTS (type);
1986 elem_type = TREE_TYPE (type);
1987 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1988
1989 sel = XALLOCAVEC (unsigned char, nelts);
1990 orig = NULL;
1991 maybe_ident = true;
1992 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
1993 {
1994 tree ref, op1;
1995
1996 if (i >= nelts)
1997 return false;
1998
1999 if (TREE_CODE (elt->value) != SSA_NAME)
2000 return false;
2001 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
2002 if (!def_stmt)
2003 return false;
2004 code = gimple_assign_rhs_code (def_stmt);
2005 if (code != BIT_FIELD_REF)
2006 return false;
2007 op1 = gimple_assign_rhs1 (def_stmt);
2008 ref = TREE_OPERAND (op1, 0);
2009 if (orig)
2010 {
2011 if (ref != orig)
2012 return false;
2013 }
2014 else
2015 {
2016 if (TREE_CODE (ref) != SSA_NAME)
2017 return false;
2018 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
2019 return false;
2020 orig = ref;
2021 }
2022 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2023 return false;
2024 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2025 if (sel[i] != i) maybe_ident = false;
2026 }
2027 if (i < nelts)
2028 return false;
2029
2030 if (maybe_ident)
2031 gimple_assign_set_rhs_from_tree (gsi, orig);
2032 else
2033 {
2034 tree mask_type, *mask_elts;
2035
2036 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2037 return false;
2038 mask_type
2039 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2040 nelts);
2041 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2042 || GET_MODE_SIZE (TYPE_MODE (mask_type))
2043 != GET_MODE_SIZE (TYPE_MODE (type)))
2044 return false;
2045 mask_elts = XALLOCAVEC (tree, nelts);
2046 for (i = 0; i < nelts; i++)
2047 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
2048 op2 = build_vector (mask_type, mask_elts);
2049 gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
2050 }
2051 update_stmt (gsi_stmt (*gsi));
2052 return true;
2053 }
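
/* An illustrative sketch for V4SI (hypothetical SSA names):

     _1 = BIT_FIELD_REF <v_5, 32, 96>;
     _2 = BIT_FIELD_REF <v_5, 32, 64>;
     _3 = BIT_FIELD_REF <v_5, 32, 32>;
     _4 = BIT_FIELD_REF <v_5, 32, 0>;
     x_6 = {_1, _2, _3, _4};

   becomes, when the target supports the permutation,

     x_6 = VEC_PERM_EXPR <v_5, v_5, { 3, 2, 1, 0 }>;

   while a constructor reading elements 0..3 in order is rewritten to
   the plain copy  x_6 = v_5;  via the maybe_ident path.  */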
2054
2055
2056 /* Primitive "lattice" function for gimple_simplify. */
2057
2058 static tree
2059 fwprop_ssa_val (tree name)
2060 {
2061 /* First valueize NAME. */
2062 if (TREE_CODE (name) == SSA_NAME
2063 && SSA_NAME_VERSION (name) < lattice.length ())
2064 {
2065 tree val = lattice[SSA_NAME_VERSION (name)];
2066 if (val)
2067 name = val;
2068 }
2069 /* We continue matching along SSA use-def edges for SSA names
2070 that are not single-use. Currently there are no patterns
2071 that would cause any issues with that. */
2072 return name;
2073 }
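
/* For example, after processing  b_2 = a_1;  the lattice maps b_2 to
   a_1, so a later match of  c_3 = b_2 + 1;  through fwprop_ssa_val
   effectively sees  a_1 + 1  (illustrative SSA names).  */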
2074
2075 /* Main entry point for the forward propagation and statement combine
2076 optimizer. */
2077
2078 static const pass_data pass_data_forwprop =
2079 {
2080 GIMPLE_PASS, /* type */
2081 "forwprop", /* name */
2082 OPTGROUP_NONE, /* optinfo_flags */
2083 TV_TREE_FORWPROP, /* tv_id */
2084 ( PROP_cfg | PROP_ssa ), /* properties_required */
2085 0, /* properties_provided */
2086 0, /* properties_destroyed */
2087 0, /* todo_flags_start */
2088 TODO_update_ssa, /* todo_flags_finish */
2089 };
2090
2091 class pass_forwprop GCC_FINAL : public gimple_opt_pass
2092 {
2093 public:
2094 pass_forwprop (gcc::context *ctxt)
2095 : gimple_opt_pass (pass_data_forwprop, ctxt)
2096 {}
2097
2098 /* opt_pass methods: */
2099 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2100 virtual bool gate (function *) { return flag_tree_forwprop; }
2101 virtual unsigned int execute (function *);
2102
2103 }; // class pass_forwprop
2104
2105 unsigned int
2106 pass_forwprop::execute (function *fun)
2107 {
2108 unsigned int todoflags = 0;
2109
2110 cfg_changed = false;
2111
2112 /* Combine stmts with the stmts defining their operands. Do that
2113 in an order that guarantees visiting SSA defs before SSA uses. */
2114 lattice.create (num_ssa_names);
2115 lattice.quick_grow_cleared (num_ssa_names);
2116 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2117 int postorder_num = inverted_post_order_compute (postorder);
2118 auto_vec<gimple, 4> to_fixup;
2119 to_purge = BITMAP_ALLOC (NULL);
2120 for (int i = 0; i < postorder_num; ++i)
2121 {
2122 gimple_stmt_iterator gsi;
2123 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2124
2125 /* Apply forward propagation to all stmts in the basic-block.
2126 Note we update GSI within the loop as necessary. */
2127 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2128 {
2129 gimple stmt = gsi_stmt (gsi);
2130 tree lhs, rhs;
2131 enum tree_code code;
2132
2133 if (!is_gimple_assign (stmt))
2134 {
2135 gsi_next (&gsi);
2136 continue;
2137 }
2138
2139 lhs = gimple_assign_lhs (stmt);
2140 rhs = gimple_assign_rhs1 (stmt);
2141 code = gimple_assign_rhs_code (stmt);
2142 if (TREE_CODE (lhs) != SSA_NAME
2143 || has_zero_uses (lhs))
2144 {
2145 gsi_next (&gsi);
2146 continue;
2147 }
2148
2149 /* If this statement sets an SSA_NAME to an address,
2150 try to propagate the address into the uses of the SSA_NAME. */
2151 if (code == ADDR_EXPR
2152 /* Handle pointer conversions on invariant addresses
2153 as well, as this is valid gimple. */
2154 || (CONVERT_EXPR_CODE_P (code)
2155 && TREE_CODE (rhs) == ADDR_EXPR
2156 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2157 {
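	      /* An illustrative sketch (hypothetical names):  given
		 p_1 = &a.b;  a use like  *p_1 = x_2;  can be rewritten
		 to  a.b = x_2;  leaving p_1's definition dead, in which
		 case the stmt is removed below.  */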
2158 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2159 if ((!base
2160 || !DECL_P (base)
2161 || decl_address_invariant_p (base))
2162 && !stmt_references_abnormal_ssa_name (stmt)
2163 && forward_propagate_addr_expr (lhs, rhs, true))
2164 {
2165 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2166 release_defs (stmt);
2167 gsi_remove (&gsi, true);
2168 }
2169 else
2170 gsi_next (&gsi);
2171 }
2172 else if (code == POINTER_PLUS_EXPR)
2173 {
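	      /* An illustrative sketch (hypothetical names):  for
		 q_2 = p_1 + 4;  uses of q_2 are rewritten through the
		 equivalent address &MEM[p_1 + 4] built below.  */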
2174 tree off = gimple_assign_rhs2 (stmt);
2175 if (TREE_CODE (off) == INTEGER_CST
2176 && can_propagate_from (stmt)
2177 && !simple_iv_increment_p (stmt)
2178 /* ??? Better adjust the interface to that function
2179 instead of building new trees here. */
2180 && forward_propagate_addr_expr
2181 (lhs,
2182 build1_loc (gimple_location (stmt),
2183 ADDR_EXPR, TREE_TYPE (rhs),
2184 fold_build2 (MEM_REF,
2185 TREE_TYPE (TREE_TYPE (rhs)),
2186 rhs,
2187 fold_convert (ptr_type_node,
2188 off))), true))
2189 {
2190 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2191 release_defs (stmt);
2192 gsi_remove (&gsi, true);
2193 }
2194 else if (is_gimple_min_invariant (rhs))
2195 {
2196 /* Make sure to fold &a[0] + off_1 here. */
2197 fold_stmt_inplace (&gsi);
2198 update_stmt (stmt);
2199 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2200 gsi_next (&gsi);
2201 }
2202 else
2203 gsi_next (&gsi);
2204 }
2205 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
2206 && gimple_assign_load_p (stmt)
2207 && !gimple_has_volatile_ops (stmt)
2208 && (TREE_CODE (gimple_assign_rhs1 (stmt))
2209 != TARGET_MEM_REF)
2210 && !stmt_can_throw_internal (stmt))
2211 {
2212 /* Rewrite loads used only in real/imagpart extractions to
2213 component-wise loads. */
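	      /* An illustrative sketch (hypothetical SSA names):

		   c_1 = *p_2;
		   r_3 = REALPART_EXPR <c_1>;
		   i_4 = IMAGPART_EXPR <c_1>;

		 becomes

		   r_3 = REALPART_EXPR <*p_2>;
		   i_4 = IMAGPART_EXPR <*p_2>;  */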
2214 use_operand_p use_p;
2215 imm_use_iterator iter;
2216 bool rewrite = true;
2217 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2218 {
2219 gimple use_stmt = USE_STMT (use_p);
2220 if (is_gimple_debug (use_stmt))
2221 continue;
2222 if (!is_gimple_assign (use_stmt)
2223 || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
2224 && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
2225 {
2226 rewrite = false;
2227 break;
2228 }
2229 }
2230 if (rewrite)
2231 {
2232 gimple use_stmt;
2233 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2234 {
2235 if (is_gimple_debug (use_stmt))
2236 {
2237 if (gimple_debug_bind_p (use_stmt))
2238 {
2239 gimple_debug_bind_reset_value (use_stmt);
2240 update_stmt (use_stmt);
2241 }
2242 continue;
2243 }
2244
2245 tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
2246 TREE_TYPE (TREE_TYPE (rhs)),
2247 unshare_expr (rhs));
2248 gimple new_stmt
2249 = gimple_build_assign (gimple_assign_lhs (use_stmt),
2250 new_rhs);
2251
2252 location_t loc = gimple_location (use_stmt);
2253 gimple_set_location (new_stmt, loc);
2254 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2255 unlink_stmt_vdef (use_stmt);
2256 gsi_remove (&gsi2, true);
2257
2258 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2259 }
2260
2261 release_defs (stmt);
2262 gsi_remove (&gsi, true);
2263 }
2264 else
2265 gsi_next (&gsi);
2266 }
2267 else if (code == COMPLEX_EXPR)
2268 {
2269 /* Rewrite stores of a single-use complex build expression
2270 to component-wise stores. */
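	      /* An illustrative sketch (hypothetical SSA names):

		   c_1 = COMPLEX_EXPR <r_2, i_3>;
		   *p_4 = c_1;

		 becomes

		   REALPART_EXPR <*p_4> = r_2;
		   IMAGPART_EXPR <*p_4> = i_3;  */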
2271 use_operand_p use_p;
2272 gimple use_stmt;
2273 if (single_imm_use (lhs, &use_p, &use_stmt)
2274 && gimple_store_p (use_stmt)
2275 && !gimple_has_volatile_ops (use_stmt)
2276 && is_gimple_assign (use_stmt)
2277 && (TREE_CODE (gimple_assign_lhs (use_stmt))
2278 != TARGET_MEM_REF))
2279 {
2280 tree use_lhs = gimple_assign_lhs (use_stmt);
2281 tree new_lhs = build1 (REALPART_EXPR,
2282 TREE_TYPE (TREE_TYPE (use_lhs)),
2283 unshare_expr (use_lhs));
2284 gimple new_stmt = gimple_build_assign (new_lhs, rhs);
2285 location_t loc = gimple_location (use_stmt);
2286 gimple_set_location (new_stmt, loc);
2287 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2288 gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
2289 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2290 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2291 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2292 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2293
2294 new_lhs = build1 (IMAGPART_EXPR,
2295 TREE_TYPE (TREE_TYPE (use_lhs)),
2296 unshare_expr (use_lhs));
2297 gimple_assign_set_lhs (use_stmt, new_lhs);
2298 gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
2299 update_stmt (use_stmt);
2300
2301 release_defs (stmt);
2302 gsi_remove (&gsi, true);
2303 }
2304 else
2305 gsi_next (&gsi);
2306 }
2307 else
2308 gsi_next (&gsi);
2309 }
2310
2311 /* Combine stmts with the stmts defining their operands.
2312 Note we update GSI within the loop as necessary. */
2313 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2314 {
2315 gimple stmt = gsi_stmt (gsi);
2316 gimple orig_stmt = stmt;
2317 bool changed = false;
2318 bool was_noreturn = (is_gimple_call (stmt)
2319 && gimple_call_noreturn_p (stmt));
2320
2321 /* Mark stmt as potentially needing revisiting. */
2322 gimple_set_plf (stmt, GF_PLF_1, false);
2323
2324 if (fold_stmt (&gsi, fwprop_ssa_val))
2325 {
2326 changed = true;
2327 stmt = gsi_stmt (gsi);
2328 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2329 bitmap_set_bit (to_purge, bb->index);
2330 if (!was_noreturn
2331 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2332 to_fixup.safe_push (stmt);
2333 /* Cleanup the CFG if we simplified a condition to
2334 true or false. */
2335 if (gcond *cond = dyn_cast <gcond *> (stmt))
2336 if (gimple_cond_true_p (cond)
2337 || gimple_cond_false_p (cond))
2338 cfg_changed = true;
2339 update_stmt (stmt);
2340 }
2341
2342 switch (gimple_code (stmt))
2343 {
2344 case GIMPLE_ASSIGN:
2345 {
2346 tree rhs1 = gimple_assign_rhs1 (stmt);
2347 enum tree_code code = gimple_assign_rhs_code (stmt);
2348
2349 if (code == COND_EXPR
2350 || code == VEC_COND_EXPR)
2351 {
2352 /* In this case the entire COND_EXPR is in rhs1. */
2353 if (forward_propagate_into_cond (&gsi))
2354 {
2355 changed = true;
2356 stmt = gsi_stmt (gsi);
2357 }
2358 }
2359 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2360 {
2361 int did_something;
2362 did_something = forward_propagate_into_comparison (&gsi);
2363 if (did_something == 2)
2364 cfg_changed = true;
2365 changed = did_something != 0;
2366 }
2367 else if ((code == PLUS_EXPR
2368 || code == BIT_IOR_EXPR
2369 || code == BIT_XOR_EXPR)
2370 && simplify_rotate (&gsi))
2371 changed = true;
2372 else if (code == VEC_PERM_EXPR)
2373 {
2374 int did_something = simplify_permutation (&gsi);
2375 if (did_something == 2)
2376 cfg_changed = true;
2377 changed = did_something != 0;
2378 }
2379 else if (code == BIT_FIELD_REF)
2380 changed = simplify_bitfield_ref (&gsi);
2381 else if (code == CONSTRUCTOR
2382 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2383 changed = simplify_vector_constructor (&gsi);
2384 break;
2385 }
2386
2387 case GIMPLE_SWITCH:
2388 changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
2389 break;
2390
2391 case GIMPLE_COND:
2392 {
2393 int did_something
2394 = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
2395 if (did_something == 2)
2396 cfg_changed = true;
2397 changed = did_something != 0;
2398 break;
2399 }
2400
2401 case GIMPLE_CALL:
2402 {
2403 tree callee = gimple_call_fndecl (stmt);
2404 if (callee != NULL_TREE
2405 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2406 changed = simplify_builtin_call (&gsi, callee);
2407 break;
2408 }
2409
2410 default:;
2411 }
2412
2413 if (changed)
2414 {
2415 /* If the stmt changed then re-visit it and the statements
2416 inserted before it. */
2417 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2418 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2419 break;
2420 if (gsi_end_p (gsi))
2421 gsi = gsi_start_bb (bb);
2422 else
2423 gsi_next (&gsi);
2424 }
2425 else
2426 {
2427 /* Stmt no longer needs to be revisited. */
2428 gimple_set_plf (stmt, GF_PLF_1, true);
2429
2430 /* Fill up the lattice. */
2431 if (gimple_assign_single_p (stmt))
2432 {
2433 tree lhs = gimple_assign_lhs (stmt);
2434 tree rhs = gimple_assign_rhs1 (stmt);
2435 if (TREE_CODE (lhs) == SSA_NAME)
2436 {
2437 tree val = lhs;
2438 if (TREE_CODE (rhs) == SSA_NAME)
2439 val = fwprop_ssa_val (rhs);
2440 else if (is_gimple_min_invariant (rhs))
2441 val = rhs;
2442 fwprop_set_lattice_val (lhs, val);
2443 }
2444 }
2445
2446 gsi_next (&gsi);
2447 }
2448 }
2449 }
2450 free (postorder);
2451 lattice.release ();
2452
2453 /* Fixup stmts that became noreturn calls. This may require splitting
2454 blocks and thus isn't possible during the walk. Do this
2455 in reverse order so we don't inadvertently remove a stmt we want to
2456 fix up by visiting a dominating, now-noreturn call first. */
2457 while (!to_fixup.is_empty ())
2458 {
2459 gimple stmt = to_fixup.pop ();
2460 if (dump_file && dump_flags & TDF_DETAILS)
2461 {
2462 fprintf (dump_file, "Fixing up noreturn call ");
2463 print_gimple_stmt (dump_file, stmt, 0, 0);
2464 fprintf (dump_file, "\n");
2465 }
2466 cfg_changed |= fixup_noreturn_call (stmt);
2467 }
2468
2469 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2470 BITMAP_FREE (to_purge);
2471
2472 if (cfg_changed)
2473 todoflags |= TODO_cleanup_cfg;
2474
2475 return todoflags;
2476 }
2477
2478 gimple_opt_pass *
2479 make_pass_forwprop (gcc::context *ctxt)
2480 {
2481 return new pass_forwprop (ctxt);
2482 }
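
/* forwprop is scheduled several times over the compilation of a
   function (see passes.def), which is why clone () above returns a
   fresh pass_forwprop instance for each scheduled copy.  */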