1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "tree-pass.h"
29 #include "tm_p.h"
30 #include "ssa.h"
31 #include "expmed.h"
32 #include "optabs-query.h"
33 #include "insn-config.h"
34 #include "emit-rtl.h"
35 #include "gimple-pretty-print.h"
36 #include "diagnostic.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "gimple-iterator.h"
45 #include "gimplify-me.h"
46 #include "tree-cfg.h"
47 #include "flags.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "varasm.h"
52 #include "stmt.h"
53 #include "expr.h"
54 #include "tree-dfa.h"
55 #include "langhooks.h"
56 #include "cfgloop.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-dom.h"
59 #include "builtins.h"
60 #include "tree-cfgcleanup.h"
61 #include "tree-into-ssa.h"
62 #include "cfganal.h"
63
64 /* This pass propagates the RHS of assignment statements into use
65 sites of the LHS of the assignment. It's basically a specialized
66 form of tree combination. It is hoped all of this can disappear
67 when we have a generalized tree combiner.
68
69 One class of common cases we handle is forward propagating a single use
70 variable into a COND_EXPR.
71
72 bb0:
73 x = a COND b;
74 if (x) goto ... else goto ...
75
76 Will be transformed into:
77
78 bb0:
79 if (a COND b) goto ... else goto ...
80
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82
83 Or (assuming c1 and c2 are constants):
84
85 bb0:
86 x = a + c1;
87 if (x EQ/NEQ c2) goto ... else goto ...
88
89 Will be transformed into:
90
91 bb0:
92 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
93
94 Similarly for x = a - c1.
95
96 Or
97
98 bb0:
99 x = !a
100 if (x) goto ... else goto ...
101
102 Will be transformed into:
103
104 bb0:
105 if (a == 0) goto ... else goto ...
106
107 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
108 For these cases, we propagate A into all, possibly more than one,
109 COND_EXPRs that use X.
110
111 Or
112
113 bb0:
114 x = (typecast) a
115 if (x) goto ... else goto ...
116
117 Will be transformed into:
118
119 bb0:
120 if (a != 0) goto ... else goto ...
121
122 (Assuming a has an integral type and x has a boolean type, or x
123 has an integral type and a has a boolean type.)
124
125 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
126 For these cases, we propagate A into all, possibly more than one,
127 COND_EXPRs that use X.
128
129 In addition to eliminating the variable and the statement which assigns
130 a value to the variable, we may be able to later thread the jump without
131 adding insane complexity in the dominator optimizer.
132
133 Also note these transformations can cascade. We handle this by having
134 a worklist of COND_EXPR statements to examine. As we make a change to
135 a statement, we put it back on the worklist to examine on the next
136 iteration of the main loop.
137
138 A second class of propagation opportunities arises for ADDR_EXPR
139 nodes.
140
141 ptr = &x->y->z;
142 res = *ptr;
143
144 Will get turned into
145
146 res = x->y->z;
147
148 Or
149 ptr = (type1*)&type2var;
150 res = *ptr
151
152 Will get turned into (if type1 and type2 are the same size
153 and neither has volatile on them):
154 res = VIEW_CONVERT_EXPR<type1>(type2var)
155
156 Or
157
158 ptr = &x[0];
159 ptr2 = ptr + <constant>;
160
161 Will get turned into
162
163 ptr2 = &x[constant/elementsize];
164
165 Or
166
167 ptr = &x[0];
168 offset = index * element_size;
169 offset_p = (pointer) offset;
170 ptr2 = ptr + offset_p
171
172 Will get turned into:
173
174 ptr2 = &x[index];
175
176 Or
177 ssa = (int) decl
178 res = ssa & 1
179
180 Provided that decl has known alignment >= 2, will get turned into
181
182 res = 0
183
184 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
185 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
186 {NOT_EXPR,NEG_EXPR}.
187
188 This will (of course) be extended as other needs arise. */
189
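/* A minimal C-level sketch of what the pass achieves (hypothetical input,
   not code from this file): given

     int x[16];
     int *p = &x[0];
     int *q = p + 4;     (GIMPLE: q_2 = p_1 p+ 16;  a byte offset)

   the ADDR_EXPR propagation described above rewrites the definition of
   q into the equivalent of

     int *q = &x[4];

   recovering array indexing from pointer arithmetic for later passes.  */
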
190 static bool forward_propagate_addr_expr (tree, tree, bool);
191
192 /* Set to true if we delete dead edges during the optimization. */
193 static bool cfg_changed;
194
195 static tree rhs_to_tree (tree type, gimple *stmt);
196
197 static bitmap to_purge;
198
199 /* Const-and-copy lattice. */
200 static vec<tree> lattice;
201
202 /* Set the lattice entry for NAME to VAL. */
203 static void
204 fwprop_set_lattice_val (tree name, tree val)
205 {
206 if (TREE_CODE (name) == SSA_NAME)
207 {
208 if (SSA_NAME_VERSION (name) >= lattice.length ())
209 {
210 lattice.reserve (num_ssa_names - lattice.length ());
211 lattice.quick_grow_cleared (num_ssa_names);
212 }
213 lattice[SSA_NAME_VERSION (name)] = val;
214 }
215 }
216
217 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
218 static void
219 fwprop_invalidate_lattice (tree name)
220 {
221 if (name
222 && TREE_CODE (name) == SSA_NAME
223 && SSA_NAME_VERSION (name) < lattice.length ())
224 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
225 }
226
227
228 /* Get the statement we can propagate from into NAME skipping
229 trivial copies. Returns the statement which defines the
230 propagation source or NULL if there is no such statement.
231 If SINGLE_USE_ONLY is set considers only sources which have
232 a single use chain up to NAME. If SINGLE_USE_P is non-null,
233 it is set to whether the chain to NAME is a single use chain
234 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
235
236 static gimple *
237 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
238 {
239 bool single_use = true;
240
241 do {
242 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
243
244 if (!has_single_use (name))
245 {
246 single_use = false;
247 if (single_use_only)
248 return NULL;
249 }
250
251 /* If name is defined by a PHI node or is the default def, bail out. */
252 if (!is_gimple_assign (def_stmt))
253 return NULL;
254
255 /* If def_stmt is a simple copy, continue looking. */
256 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
257 name = gimple_assign_rhs1 (def_stmt);
258 else
259 {
260 if (!single_use_only && single_use_p)
261 *single_use_p = single_use;
262
263 return def_stmt;
264 }
265 } while (1);
266 }
267
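/* For illustration (hypothetical GIMPLE): given the chain

     a_1 = b_2 > c_3;
     d_4 = a_1;                  (trivial copy, skipped)
     if (d_4 != 0) ...

   get_prop_source_stmt (d_4, ...) walks through the copy d_4 = a_1 and
   returns the statement a_1 = b_2 > c_3 as the propagation source,
   recording on the way whether the whole chain is single-use.  */
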
268 /* Checks if the destination ssa name in DEF_STMT can be used as a
269 propagation source. Returns true if so, otherwise false. */
270
271 static bool
272 can_propagate_from (gimple *def_stmt)
273 {
274 gcc_assert (is_gimple_assign (def_stmt));
275
276 /* If the rhs has side-effects we cannot propagate from it. */
277 if (gimple_has_volatile_ops (def_stmt))
278 return false;
279
280 /* If the rhs is a load we cannot propagate from it. */
281 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
282 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
283 return false;
284
285 /* Constants can always be propagated. */
286 if (gimple_assign_single_p (def_stmt)
287 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
288 return true;
289
290 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
291 if (stmt_references_abnormal_ssa_name (def_stmt))
292 return false;
293
294 /* If the definition is a conversion of a pointer to a function type,
295 then we cannot apply optimizations, as some targets require
296 function pointers to be canonicalized, and this optimization
297 could eliminate a necessary canonicalization. */
298 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
299 {
300 tree rhs = gimple_assign_rhs1 (def_stmt);
301 if (POINTER_TYPE_P (TREE_TYPE (rhs))
302 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
303 return false;
304 }
305
306 return true;
307 }
308
309 /* Remove a chain of dead statements starting at the definition of
310 NAME. The chain is linked via the first operand of the defining statements.
311 If NAME was replaced in its only use then this function can be used
312 to clean up dead stmts. The function handles already released SSA
313 names gracefully.
314 Returns true if cleanup-cfg has to run. */
315
316 static bool
317 remove_prop_source_from_use (tree name)
318 {
319 gimple_stmt_iterator gsi;
320 gimple *stmt;
321 bool cfg_changed = false;
322
323 do {
324 basic_block bb;
325
326 if (SSA_NAME_IN_FREE_LIST (name)
327 || SSA_NAME_IS_DEFAULT_DEF (name)
328 || !has_zero_uses (name))
329 return cfg_changed;
330
331 stmt = SSA_NAME_DEF_STMT (name);
332 if (gimple_code (stmt) == GIMPLE_PHI
333 || gimple_has_side_effects (stmt))
334 return cfg_changed;
335
336 bb = gimple_bb (stmt);
337 gsi = gsi_for_stmt (stmt);
338 unlink_stmt_vdef (stmt);
339 if (gsi_remove (&gsi, true))
340 bitmap_set_bit (to_purge, bb->index);
341 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
342 release_defs (stmt);
343
344 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
345 } while (name && TREE_CODE (name) == SSA_NAME);
346
347 return cfg_changed;
348 }
349
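/* For illustration (hypothetical GIMPLE): if the only use of a_1 in

     b_2 = x_3 + 1;
     a_1 = b_2 * 2;
     ... = a_1;                  (use just replaced by propagation)

   has been replaced, remove_prop_source_from_use (a_1) deletes the now
   dead definition of a_1 and then follows its rhs1 to delete the dead
   definition of b_2 as well.  */
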
350 /* Return the rhs of a gassign *STMT in the form of a single tree,
351 converted to type TYPE.
352
353 This should disappear, but is needed so we can combine expressions and use
354 the fold() interfaces. Long term, we need to develop folding and combine
355 routines that deal with gimple exclusively. */
356
357 static tree
358 rhs_to_tree (tree type, gimple *stmt)
359 {
360 location_t loc = gimple_location (stmt);
361 enum tree_code code = gimple_assign_rhs_code (stmt);
362 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
363 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
364 gimple_assign_rhs2 (stmt),
365 gimple_assign_rhs3 (stmt));
366 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
367 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
368 gimple_assign_rhs2 (stmt));
369 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
370 return build1 (code, type, gimple_assign_rhs1 (stmt));
371 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
372 return gimple_assign_rhs1 (stmt);
373 else
374 gcc_unreachable ();
375 }
376
377 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
378 the folded result in a form suitable for COND_EXPR_COND or
379 NULL_TREE, if there is no suitable simplified form. If
380 INVARIANT_ONLY is true only gimple_min_invariant results are
381 considered simplified. */
382
383 static tree
384 combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
385 tree op0, tree op1, bool invariant_only)
386 {
387 tree t;
388
389 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
390
391 fold_defer_overflow_warnings ();
392 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
393 if (!t)
394 {
395 fold_undefer_overflow_warnings (false, NULL, 0);
396 return NULL_TREE;
397 }
398
399 /* Require that we got a boolean type out if we put one in. */
400 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
401
402 /* Canonicalize the combined condition for use in a COND_EXPR. */
403 t = canonicalize_cond_expr_cond (t);
404
405 /* Bail out if we required an invariant but didn't get one. */
406 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
407 {
408 fold_undefer_overflow_warnings (false, NULL, 0);
409 return NULL_TREE;
410 }
411
412 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
413
414 return t;
415 }
416
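/* For illustration (hypothetical operands): called with CODE == EQ_EXPR,
   OP0 == a_1 + 1 and OP1 == 2, fold_binary_loc folds the comparison to
   a_1 == 1, which passes the canonicalization check and is returned as
   the simplified condition.  */
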
417 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
418 of its operand. Return a new comparison tree or NULL_TREE if there
419 were no simplifying combines. */
420
421 static tree
422 forward_propagate_into_comparison_1 (gimple *stmt,
423 enum tree_code code, tree type,
424 tree op0, tree op1)
425 {
426 tree tmp = NULL_TREE;
427 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
428 bool single_use0_p = false, single_use1_p = false;
429
430 /* For comparisons, try the first operand first; that is likely to
431 simplify comparisons against constants. */
432 if (TREE_CODE (op0) == SSA_NAME)
433 {
434 gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
435 if (def_stmt && can_propagate_from (def_stmt))
436 {
437 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
438 bool invariant_only_p = !single_use0_p;
439
440 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
441
442 /* Always combine comparisons or conversions from booleans. */
443 if (TREE_CODE (op1) == INTEGER_CST
444 && ((CONVERT_EXPR_CODE_P (def_code)
445 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
446 == BOOLEAN_TYPE)
447 || TREE_CODE_CLASS (def_code) == tcc_comparison))
448 invariant_only_p = false;
449
450 tmp = combine_cond_expr_cond (stmt, code, type,
451 rhs0, op1, invariant_only_p);
452 if (tmp)
453 return tmp;
454 }
455 }
456
457 /* If that wasn't successful, try the second operand. */
458 if (TREE_CODE (op1) == SSA_NAME)
459 {
460 gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
461 if (def_stmt && can_propagate_from (def_stmt))
462 {
463 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
464 tmp = combine_cond_expr_cond (stmt, code, type,
465 op0, rhs1, !single_use1_p);
466 if (tmp)
467 return tmp;
468 }
469 }
470
471 /* If that wasn't successful either, try both operands. */
472 if (rhs0 != NULL_TREE
473 && rhs1 != NULL_TREE)
474 tmp = combine_cond_expr_cond (stmt, code, type,
475 rhs0, rhs1,
476 !(single_use0_p && single_use1_p));
477
478 return tmp;
479 }
480
481 /* Propagate from the ssa name definition statements of the assignment
482 from a comparison at *GSI into the conditional if that simplifies it.
483 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
484 otherwise returns 0. */
485
486 static int
487 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
488 {
489 gimple *stmt = gsi_stmt (*gsi);
490 tree tmp;
491 bool cfg_changed = false;
492 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
493 tree rhs1 = gimple_assign_rhs1 (stmt);
494 tree rhs2 = gimple_assign_rhs2 (stmt);
495
496 /* Combine the comparison with defining statements. */
497 tmp = forward_propagate_into_comparison_1 (stmt,
498 gimple_assign_rhs_code (stmt),
499 type, rhs1, rhs2);
500 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
501 {
502 gimple_assign_set_rhs_from_tree (gsi, tmp);
503 fold_stmt (gsi);
504 update_stmt (gsi_stmt (*gsi));
505
506 if (TREE_CODE (rhs1) == SSA_NAME)
507 cfg_changed |= remove_prop_source_from_use (rhs1);
508 if (TREE_CODE (rhs2) == SSA_NAME)
509 cfg_changed |= remove_prop_source_from_use (rhs2);
510 return cfg_changed ? 2 : 1;
511 }
512
513 return 0;
514 }
515
516 /* Propagate from the ssa name definition statements of COND_EXPR
517 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
518 Returns zero if no statement was changed, one if there were
519 changes and two if cfg_cleanup needs to run.
520
521 This must be kept in sync with forward_propagate_into_cond. */
522
523 static int
524 forward_propagate_into_gimple_cond (gcond *stmt)
525 {
526 tree tmp;
527 enum tree_code code = gimple_cond_code (stmt);
528 bool cfg_changed = false;
529 tree rhs1 = gimple_cond_lhs (stmt);
530 tree rhs2 = gimple_cond_rhs (stmt);
531
532 /* We can do tree combining on SSA_NAME and comparison expressions. */
533 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
534 return 0;
535
536 tmp = forward_propagate_into_comparison_1 (stmt, code,
537 boolean_type_node,
538 rhs1, rhs2);
539 if (tmp)
540 {
541 if (dump_file)
542 {
543 fprintf (dump_file, " Replaced '");
544 print_gimple_expr (dump_file, stmt, 0, 0);
545 fprintf (dump_file, "' with '");
546 print_generic_expr (dump_file, tmp, 0);
547 fprintf (dump_file, "'\n");
548 }
549
550 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
551 update_stmt (stmt);
552
553 if (TREE_CODE (rhs1) == SSA_NAME)
554 cfg_changed |= remove_prop_source_from_use (rhs1);
555 if (TREE_CODE (rhs2) == SSA_NAME)
556 cfg_changed |= remove_prop_source_from_use (rhs2);
557 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
558 }
559
560 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
561 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
562 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
563 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
564 && ((code == EQ_EXPR
565 && integer_zerop (rhs2))
566 || (code == NE_EXPR
567 && integer_onep (rhs2))))
568 {
569 basic_block bb = gimple_bb (stmt);
570 gimple_cond_set_code (stmt, NE_EXPR);
571 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
572 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
573 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
574 return 1;
575 }
576
577 return 0;
578 }
579
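/* For illustration (hypothetical GIMPLE): with _Bool b_1, the
   canonicalization above turns

     if (b_1 == 0) goto L1; else goto L2;

   into

     if (b_1 != 0) goto L2; else goto L1;

   by setting the condition code to NE_EXPR and toggling the
   EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags on both successor edges.  */
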
580
581 /* Propagate from the ssa name definition statements of COND_EXPR
582 in the rhs of statement STMT into the conditional if that simplifies it.
583 Returns true if the stmt was changed, false otherwise. */
584
585 static bool
586 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
587 {
588 gimple *stmt = gsi_stmt (*gsi_p);
589 tree tmp = NULL_TREE;
590 tree cond = gimple_assign_rhs1 (stmt);
591 enum tree_code code = gimple_assign_rhs_code (stmt);
592
593 /* We can do tree combining on SSA_NAME and comparison expressions. */
594 if (COMPARISON_CLASS_P (cond))
595 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
596 TREE_TYPE (cond),
597 TREE_OPERAND (cond, 0),
598 TREE_OPERAND (cond, 1));
599 else if (TREE_CODE (cond) == SSA_NAME)
600 {
601 enum tree_code def_code;
602 tree name = cond;
603 gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
604 if (!def_stmt || !can_propagate_from (def_stmt))
605 return false;
606
607 def_code = gimple_assign_rhs_code (def_stmt);
608 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
609 tmp = fold_build2_loc (gimple_location (def_stmt),
610 def_code,
611 TREE_TYPE (cond),
612 gimple_assign_rhs1 (def_stmt),
613 gimple_assign_rhs2 (def_stmt));
614 }
615
616 if (tmp
617 && is_gimple_condexpr (tmp))
618 {
619 if (dump_file)
620 {
621 fprintf (dump_file, " Replaced '");
622 print_generic_expr (dump_file, cond, 0);
623 fprintf (dump_file, "' with '");
624 print_generic_expr (dump_file, tmp, 0);
625 fprintf (dump_file, "'\n");
626 }
627
628 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
629 : integer_onep (tmp))
630 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
631 else if (integer_zerop (tmp))
632 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
633 else
634 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
635 stmt = gsi_stmt (*gsi_p);
636 update_stmt (stmt);
637
638 return true;
639 }
640
641 return false;
642 }
643
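/* For illustration (hypothetical GIMPLE):

     t_1 = a_2 < b_3;
     x_4 = t_1 ? c_5 : d_6;

   becomes x_4 = a_2 < b_3 ? c_5 : d_6, and if the rebuilt condition
   folds to constant 1 or 0 the COND_EXPR collapses to x_4 = c_5 or
   x_4 = d_6, respectively.  */
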
644 /* We've just substituted an ADDR_EXPR into stmt. Update all the
645 relevant data structures to match. */
646
647 static void
648 tidy_after_forward_propagate_addr (gimple *stmt)
649 {
650 /* We may have turned a trapping insn into a non-trapping insn. */
651 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
652 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
653
654 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
655 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
656 }
657
658 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
659 ADDR_EXPR <whatever>.
660
661 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
662 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
663 node or for recovery of array indexing from pointer arithmetic.
664
665 Return true if the propagation was successful (the propagation may
666 be only partially successful, yet things may still have changed). */
667
668 static bool
669 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
670 gimple_stmt_iterator *use_stmt_gsi,
671 bool single_use_p)
672 {
673 tree lhs, rhs, rhs2, array_ref;
674 gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
675 enum tree_code rhs_code;
676 bool res = true;
677
678 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
679
680 lhs = gimple_assign_lhs (use_stmt);
681 rhs_code = gimple_assign_rhs_code (use_stmt);
682 rhs = gimple_assign_rhs1 (use_stmt);
683
684 /* Do not perform copy-propagation but recurse through copy chains. */
685 if (TREE_CODE (lhs) == SSA_NAME
686 && rhs_code == SSA_NAME)
687 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
688
689 /* The use statement could be a conversion. Recurse to the uses of the
690 lhs as copyprop does not copy through pointer to integer to pointer
691 conversions and FRE does not catch all cases either.
692 Treat the case of a single-use name and
693 a conversion to def_rhs type separate, though. */
694 if (TREE_CODE (lhs) == SSA_NAME
695 && CONVERT_EXPR_CODE_P (rhs_code))
696 {
697 /* If there is a point in a conversion chain where the types match,
698 so that we can remove a conversion, re-materialize the address
699 here and stop. */
700 if (single_use_p
701 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
702 {
703 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
704 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
705 return true;
706 }
707
708 /* Else recurse if the conversion preserves the address value. */
709 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
710 || POINTER_TYPE_P (TREE_TYPE (lhs)))
711 && (TYPE_PRECISION (TREE_TYPE (lhs))
712 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
713 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
714
715 return false;
716 }
717
718 /* If this isn't a conversion chain, from here on we can only
719 propagate into compatible pointer contexts. */
720 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
721 return false;
722
723 /* Propagate through constant pointer adjustments. */
724 if (TREE_CODE (lhs) == SSA_NAME
725 && rhs_code == POINTER_PLUS_EXPR
726 && rhs == name
727 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
728 {
729 tree new_def_rhs;
730 /* As we come here with non-invariant addresses in def_rhs we need
731 to make sure we can build a valid constant offsetted address
732 for further propagation. Simply rely on fold building that
733 and check after the fact. */
734 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
735 def_rhs,
736 fold_convert (ptr_type_node,
737 gimple_assign_rhs2 (use_stmt)));
738 if (TREE_CODE (new_def_rhs) == MEM_REF
739 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
740 return false;
741 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
742 TREE_TYPE (rhs));
743
744 /* Recurse. If we could propagate into all uses of lhs do not
745 bother to replace into the current use but just pretend we did. */
746 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
747 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
748 return true;
749
750 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
751 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
752 new_def_rhs);
753 else if (is_gimple_min_invariant (new_def_rhs))
754 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
755 else
756 return false;
757 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
758 update_stmt (use_stmt);
759 return true;
760 }
761
762 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
763 ADDR_EXPR will not appear on the LHS. */
764 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
765 while (handled_component_p (*lhsp))
766 lhsp = &TREE_OPERAND (*lhsp, 0);
767 lhs = *lhsp;
768
769 /* Now see if the LHS node is a MEM_REF using NAME. If so,
770 propagate the ADDR_EXPR into the use of NAME and fold the result. */
771 if (TREE_CODE (lhs) == MEM_REF
772 && TREE_OPERAND (lhs, 0) == name)
773 {
774 tree def_rhs_base;
775 HOST_WIDE_INT def_rhs_offset;
776 /* If the address is invariant we can always fold it. */
777 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
778 &def_rhs_offset)))
779 {
780 offset_int off = mem_ref_offset (lhs);
781 tree new_ptr;
782 off += def_rhs_offset;
783 if (TREE_CODE (def_rhs_base) == MEM_REF)
784 {
785 off += mem_ref_offset (def_rhs_base);
786 new_ptr = TREE_OPERAND (def_rhs_base, 0);
787 }
788 else
789 new_ptr = build_fold_addr_expr (def_rhs_base);
790 TREE_OPERAND (lhs, 0) = new_ptr;
791 TREE_OPERAND (lhs, 1)
792 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
793 tidy_after_forward_propagate_addr (use_stmt);
794 /* Continue propagating into the RHS if this was not the only use. */
795 if (single_use_p)
796 return true;
797 }
798 /* If the LHS is a plain dereference and the value type is the same as
799 that of the pointed-to type of the address we can put the
800 dereferenced address on the LHS preserving the original alias-type. */
801 else if (integer_zerop (TREE_OPERAND (lhs, 1))
802 && ((gimple_assign_lhs (use_stmt) == lhs
803 && useless_type_conversion_p
804 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
805 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
806 || types_compatible_p (TREE_TYPE (lhs),
807 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
808 /* Don't forward anything into clobber stmts if it would result
809 in the lhs no longer being a MEM_REF. */
810 && (!gimple_clobber_p (use_stmt)
811 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
812 {
813 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
814 tree new_offset, new_base, saved, new_lhs;
815 while (handled_component_p (*def_rhs_basep))
816 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
817 saved = *def_rhs_basep;
818 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
819 {
820 new_base = TREE_OPERAND (*def_rhs_basep, 0);
821 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
822 TREE_OPERAND (*def_rhs_basep, 1));
823 }
824 else
825 {
826 new_base = build_fold_addr_expr (*def_rhs_basep);
827 new_offset = TREE_OPERAND (lhs, 1);
828 }
829 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
830 new_base, new_offset);
831 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
832 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
833 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
834 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
835 *lhsp = new_lhs;
836 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
837 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
838 *def_rhs_basep = saved;
839 tidy_after_forward_propagate_addr (use_stmt);
840 /* Continue propagating into the RHS if this was not the
841 only use. */
842 if (single_use_p)
843 return true;
844 }
845 else
846 /* We can have a struct assignment dereferencing our name twice.
847 Note that we didn't propagate into the lhs to not falsely
848 claim we did when propagating into the rhs. */
849 res = false;
850 }
851
852 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
853 nodes from the RHS. */
854 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
855 if (TREE_CODE (*rhsp) == ADDR_EXPR)
856 rhsp = &TREE_OPERAND (*rhsp, 0);
857 while (handled_component_p (*rhsp))
858 rhsp = &TREE_OPERAND (*rhsp, 0);
859 rhs = *rhsp;
860
861 /* Now see if the RHS node is a MEM_REF using NAME. If so,
862 propagate the ADDR_EXPR into the use of NAME and fold the result. */
863 if (TREE_CODE (rhs) == MEM_REF
864 && TREE_OPERAND (rhs, 0) == name)
865 {
866 tree def_rhs_base;
867 HOST_WIDE_INT def_rhs_offset;
868 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
869 &def_rhs_offset)))
870 {
871 offset_int off = mem_ref_offset (rhs);
872 tree new_ptr;
873 off += def_rhs_offset;
874 if (TREE_CODE (def_rhs_base) == MEM_REF)
875 {
876 off += mem_ref_offset (def_rhs_base);
877 new_ptr = TREE_OPERAND (def_rhs_base, 0);
878 }
879 else
880 new_ptr = build_fold_addr_expr (def_rhs_base);
881 TREE_OPERAND (rhs, 0) = new_ptr;
882 TREE_OPERAND (rhs, 1)
883 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
884 fold_stmt_inplace (use_stmt_gsi);
885 tidy_after_forward_propagate_addr (use_stmt);
886 return res;
887 }
888 /* If the RHS is a plain dereference and the value type is the same as
889 that of the pointed-to type of the address we can put the
890 dereferenced address on the RHS preserving the original alias-type. */
891 else if (integer_zerop (TREE_OPERAND (rhs, 1))
892 && ((gimple_assign_rhs1 (use_stmt) == rhs
893 && useless_type_conversion_p
894 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
895 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
896 || types_compatible_p (TREE_TYPE (rhs),
897 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
898 {
899 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
900 tree new_offset, new_base, saved, new_rhs;
901 while (handled_component_p (*def_rhs_basep))
902 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
903 saved = *def_rhs_basep;
904 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
905 {
906 new_base = TREE_OPERAND (*def_rhs_basep, 0);
907 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
908 TREE_OPERAND (*def_rhs_basep, 1));
909 }
910 else
911 {
912 new_base = build_fold_addr_expr (*def_rhs_basep);
913 new_offset = TREE_OPERAND (rhs, 1);
914 }
915 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
916 new_base, new_offset);
917 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
918 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
919 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
920 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
921 *rhsp = new_rhs;
922 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
923 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
924 *def_rhs_basep = saved;
925 fold_stmt_inplace (use_stmt_gsi);
926 tidy_after_forward_propagate_addr (use_stmt);
927 return res;
928 }
929 }
930
931 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
932 is nothing to do. */
933 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
934 || gimple_assign_rhs1 (use_stmt) != name)
935 return false;
936
937 /* The remaining cases are all for turning pointer arithmetic into
938 array indexing. They only apply when we have the address of
939 element zero in an array. If that is not the case then there
940 is nothing to do. */
941 array_ref = TREE_OPERAND (def_rhs, 0);
942 if ((TREE_CODE (array_ref) != ARRAY_REF
943 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
944 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
945 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
946 return false;
947
948 rhs2 = gimple_assign_rhs2 (use_stmt);
949 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
950 if (TREE_CODE (rhs2) == INTEGER_CST)
951 {
952 tree new_rhs = build1_loc (gimple_location (use_stmt),
953 ADDR_EXPR, TREE_TYPE (def_rhs),
954 fold_build2 (MEM_REF,
955 TREE_TYPE (TREE_TYPE (def_rhs)),
956 unshare_expr (def_rhs),
957 fold_convert (ptr_type_node,
958 rhs2)));
959 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
960 use_stmt = gsi_stmt (*use_stmt_gsi);
961 update_stmt (use_stmt);
962 tidy_after_forward_propagate_addr (use_stmt);
963 return true;
964 }
965
966 return false;
967 }
968
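/* For illustration (hypothetical GIMPLE): the final case above rewrites

     ptr_1 = &x[2];              (x is an array of 4-byte elements)
     ptr2_2 = ptr_1 p+ 8;

   into the equivalent of ptr2_2 = &x[4], by folding the constant byte
   offset into a MEM_REF and taking its address again.  */
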
969 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
970
971 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
972 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
973 node or for recovery of array indexing from pointer arithmetic.
974
975 PARENT_SINGLE_USE_P tells whether, in a recursive invocation, NAME
976 had a single use in the previous invocation. Pass true when calling
977 this at the top level.
978
979 Returns true if all uses have been propagated into. */
980
981 static bool
982 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
983 {
984 imm_use_iterator iter;
985 gimple *use_stmt;
986 bool all = true;
987 bool single_use_p = parent_single_use_p && has_single_use (name);
988
989 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
990 {
991 bool result;
992 tree use_rhs;
993
994 /* If the use is not in a simple assignment statement, then
995 there is nothing we can do. */
996 if (!is_gimple_assign (use_stmt))
997 {
998 if (!is_gimple_debug (use_stmt))
999 all = false;
1000 continue;
1001 }
1002
1003 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1004 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1005 single_use_p);
1006 /* If the use has moved to a different statement adjust
1007 the update machinery for the old statement too. */
1008 if (use_stmt != gsi_stmt (gsi))
1009 {
1010 update_stmt (use_stmt);
1011 use_stmt = gsi_stmt (gsi);
1012 }
1013 update_stmt (use_stmt);
1014 all &= result;
1015
1016 /* Remove intermediate now unused copy and conversion chains. */
1017 use_rhs = gimple_assign_rhs1 (use_stmt);
1018 if (result
1019 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1020 && TREE_CODE (use_rhs) == SSA_NAME
1021 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1022 {
1023 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1024 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
1025 release_defs (use_stmt);
1026 gsi_remove (&gsi, true);
1027 }
1028 }
1029
1030 return all && has_zero_uses (name);
1031 }
1032
1033
1034 /* Helper function for simplify_gimple_switch. Remove case labels that
1035 have values outside the range of the new type. */
1036
1037 static void
1038 simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
1039 {
1040 unsigned int branch_num = gimple_switch_num_labels (stmt);
1041 auto_vec<tree> labels (branch_num);
1042 unsigned int i, len;
1043
1044 /* Collect the existing case labels in a VEC, and preprocess it as if
1045 we are gimplifying a GENERIC SWITCH_EXPR. */
1046 for (i = 1; i < branch_num; i++)
1047 labels.quick_push (gimple_switch_label (stmt, i));
1048 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1049
1050 /* If any labels were removed, replace the existing case labels
1051 in the GIMPLE_SWITCH statement with the correct ones.
1052 Note that the type updates were done in-place on the case labels,
1053 so we only have to replace the case labels in the GIMPLE_SWITCH
1054 if the number of labels changed. */
1055 len = labels.length ();
1056 if (len < branch_num - 1)
1057 {
1058 bitmap target_blocks;
1059 edge_iterator ei;
1060 edge e;
1061
1062 /* Corner case: *all* case labels have been removed as being
1063 out-of-range for INDEX_TYPE. Push one label and let the
1064 CFG cleanups deal with this further. */
1065 if (len == 0)
1066 {
1067 tree label, elt;
1068
1069 label = CASE_LABEL (gimple_switch_default_label (stmt));
1070 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1071 labels.quick_push (elt);
1072 len = 1;
1073 }
1074
1075 for (i = 0; i < labels.length (); i++)
1076 gimple_switch_set_label (stmt, i + 1, labels[i]);
1077 for (i++ ; i < branch_num; i++)
1078 gimple_switch_set_label (stmt, i, NULL_TREE);
1079 gimple_switch_set_num_labels (stmt, len + 1);
1080
1081 /* Cleanup any edges that are now dead. */
1082 target_blocks = BITMAP_ALLOC (NULL);
1083 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1084 {
1085 tree elt = gimple_switch_label (stmt, i);
1086 basic_block target = label_to_block (CASE_LABEL (elt));
1087 bitmap_set_bit (target_blocks, target->index);
1088 }
1089 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1090 {
1091 if (! bitmap_bit_p (target_blocks, e->dest->index))
1092 {
1093 remove_edge (e);
1094 cfg_changed = true;
1095 free_dominance_info (CDI_DOMINATORS);
1096 }
1097 else
1098 ei_next (&ei);
1099 }
1100 BITMAP_FREE (target_blocks);
1101 }
1102 }
1103
1104 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1105 the condition which we may be able to optimize better. */
1106
1107 static bool
1108 simplify_gimple_switch (gswitch *stmt)
1109 {
1110 /* The optimization that we really care about is removing unnecessary
1111 casts. That will let us do much better in propagating the inferred
1112 constant at the switch target. */
1113 tree cond = gimple_switch_index (stmt);
1114 if (TREE_CODE (cond) == SSA_NAME)
1115 {
1116 gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
1117 if (gimple_assign_cast_p (def_stmt))
1118 {
1119 tree def = gimple_assign_rhs1 (def_stmt);
1120 if (TREE_CODE (def) != SSA_NAME)
1121 return false;
1122
1123 /* If we have an extension or sign-change that preserves the
1124 values we check against, then we can copy the source value into
1125 the switch. */
1126 tree ti = TREE_TYPE (def);
1127 if (INTEGRAL_TYPE_P (ti)
1128 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1129 {
1130 size_t n = gimple_switch_num_labels (stmt);
1131 tree min = NULL_TREE, max = NULL_TREE;
1132 if (n > 1)
1133 {
1134 min = CASE_LOW (gimple_switch_label (stmt, 1));
1135 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1136 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1137 else
1138 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1139 }
1140 if ((!min || int_fits_type_p (min, ti))
1141 && (!max || int_fits_type_p (max, ti)))
1142 {
1143 gimple_switch_set_index (stmt, def);
1144 simplify_gimple_switch_label_vec (stmt, ti);
1145 update_stmt (stmt);
1146 return true;
1147 }
1148 }
1149 }
1150 }
1151
1152 return false;
1153 }
1154
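/* For illustration (hypothetical C input):

     char c = ...;
     switch ((int) c) { case 1: ...; case 2: ...; }

   All case values fit in char, so the widening cast is removed and the
   switch is performed on c directly, after pruning any case labels that
   are out of range for the narrower index type.  */
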
1155 /* For pointers p2 and p1 return p2 - p1 if the
1156 difference is known and constant, otherwise return NULL_TREE. */
1157
1158 static tree
1159 constant_pointer_difference (tree p1, tree p2)
1160 {
1161 int i, j;
1162 #define CPD_ITERATIONS 5
1163 tree exps[2][CPD_ITERATIONS];
1164 tree offs[2][CPD_ITERATIONS];
1165 int cnt[2];
1166
1167 for (i = 0; i < 2; i++)
1168 {
1169 tree p = i ? p1 : p2;
1170 tree off = size_zero_node;
1171 gimple *stmt;
1172 enum tree_code code;
1173
1174 /* For each of p1 and p2 we need to iterate at least
1175 twice, to handle ADDR_EXPR directly in p1/p2,
1176 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1177 on definition's stmt RHS. Iterate a few extra times. */
1178 j = 0;
1179 do
1180 {
1181 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1182 break;
1183 if (TREE_CODE (p) == ADDR_EXPR)
1184 {
1185 tree q = TREE_OPERAND (p, 0);
1186 HOST_WIDE_INT offset;
1187 tree base = get_addr_base_and_unit_offset (q, &offset);
1188 if (base)
1189 {
1190 q = base;
1191 if (offset)
1192 off = size_binop (PLUS_EXPR, off, size_int (offset));
1193 }
1194 if (TREE_CODE (q) == MEM_REF
1195 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1196 {
1197 p = TREE_OPERAND (q, 0);
1198 off = size_binop (PLUS_EXPR, off,
1199 wide_int_to_tree (sizetype,
1200 mem_ref_offset (q)));
1201 }
1202 else
1203 {
1204 exps[i][j] = q;
1205 offs[i][j++] = off;
1206 break;
1207 }
1208 }
1209 if (TREE_CODE (p) != SSA_NAME)
1210 break;
1211 exps[i][j] = p;
1212 offs[i][j++] = off;
1213 if (j == CPD_ITERATIONS)
1214 break;
1215 stmt = SSA_NAME_DEF_STMT (p);
1216 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1217 break;
1218 code = gimple_assign_rhs_code (stmt);
1219 if (code == POINTER_PLUS_EXPR)
1220 {
1221 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1222 break;
1223 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1224 p = gimple_assign_rhs1 (stmt);
1225 }
1226 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1227 p = gimple_assign_rhs1 (stmt);
1228 else
1229 break;
1230 }
1231 while (1);
1232 cnt[i] = j;
1233 }
1234
1235 for (i = 0; i < cnt[0]; i++)
1236 for (j = 0; j < cnt[1]; j++)
1237 if (exps[0][i] == exps[1][j])
1238 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1239
1240 return NULL_TREE;
1241 }
1242
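/* For illustration (hypothetical GIMPLE, buf a char array): for

     p1_1 = &buf[4];
     p2_2 = p1_1 p+ 6;

   constant_pointer_difference (p1_1, p2_2) finds the common base buf at
   offsets 4 and 10 in the two definition chains and returns 6.  */
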
1243 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1244 Optimize
1245 memcpy (p, "abcd", 4);
1246 memset (p + 4, ' ', 3);
1247 into
1248 memcpy (p, "abcd ", 7);
1249 call if the latter can be stored by pieces during expansion. */
1250
1251 static bool
1252 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1253 {
1254 gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
1255 tree vuse = gimple_vuse (stmt2);
1256 if (vuse == NULL)
1257 return false;
1258 stmt1 = SSA_NAME_DEF_STMT (vuse);
1259
1260 switch (DECL_FUNCTION_CODE (callee2))
1261 {
1262 case BUILT_IN_MEMSET:
1263 if (gimple_call_num_args (stmt2) != 3
1264 || gimple_call_lhs (stmt2)
1265 || CHAR_BIT != 8
1266 || BITS_PER_UNIT != 8)
1267 break;
1268 else
1269 {
1270 tree callee1;
1271 tree ptr1, src1, str1, off1, len1, lhs1;
1272 tree ptr2 = gimple_call_arg (stmt2, 0);
1273 tree val2 = gimple_call_arg (stmt2, 1);
1274 tree len2 = gimple_call_arg (stmt2, 2);
1275 tree diff, vdef, new_str_cst;
1276 gimple *use_stmt;
1277 unsigned int ptr1_align;
1278 unsigned HOST_WIDE_INT src_len;
1279 char *src_buf;
1280 use_operand_p use_p;
1281
1282 if (!tree_fits_shwi_p (val2)
1283 || !tree_fits_uhwi_p (len2)
1284 || compare_tree_int (len2, 1024) == 1)
1285 break;
1286 if (is_gimple_call (stmt1))
1287 {
1288 /* If first stmt is a call, it needs to be memcpy
1289 or mempcpy, with string literal as second argument and
1290 constant length. */
1291 callee1 = gimple_call_fndecl (stmt1);
1292 if (callee1 == NULL_TREE
1293 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1294 || gimple_call_num_args (stmt1) != 3)
1295 break;
1296 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1297 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1298 break;
1299 ptr1 = gimple_call_arg (stmt1, 0);
1300 src1 = gimple_call_arg (stmt1, 1);
1301 len1 = gimple_call_arg (stmt1, 2);
1302 lhs1 = gimple_call_lhs (stmt1);
1303 if (!tree_fits_uhwi_p (len1))
1304 break;
1305 str1 = string_constant (src1, &off1);
1306 if (str1 == NULL_TREE)
1307 break;
1308 if (!tree_fits_uhwi_p (off1)
1309 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1310 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1311 - tree_to_uhwi (off1)) > 0
1312 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1313 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1314 != TYPE_MODE (char_type_node))
1315 break;
1316 }
1317 else if (gimple_assign_single_p (stmt1))
1318 {
1319 /* Otherwise look for length 1 memcpy optimized into
1320 assignment. */
1321 ptr1 = gimple_assign_lhs (stmt1);
1322 src1 = gimple_assign_rhs1 (stmt1);
1323 if (TREE_CODE (ptr1) != MEM_REF
1324 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1325 || !tree_fits_shwi_p (src1))
1326 break;
1327 ptr1 = build_fold_addr_expr (ptr1);
1328 callee1 = NULL_TREE;
1329 len1 = size_one_node;
1330 lhs1 = NULL_TREE;
1331 off1 = size_zero_node;
1332 str1 = NULL_TREE;
1333 }
1334 else
1335 break;
1336
1337 diff = constant_pointer_difference (ptr1, ptr2);
1338 if (diff == NULL && lhs1 != NULL)
1339 {
1340 diff = constant_pointer_difference (lhs1, ptr2);
1341 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1342 && diff != NULL)
1343 diff = size_binop (PLUS_EXPR, diff,
1344 fold_convert (sizetype, len1));
1345 }
1346 /* If the difference between the second and first destination pointer
1347 is not constant, or is bigger than memcpy length, bail out. */
1348 if (diff == NULL
1349 || !tree_fits_uhwi_p (diff)
1350 || tree_int_cst_lt (len1, diff)
1351 || compare_tree_int (diff, 1024) == 1)
1352 break;
1353
1354 /* Use maximum of difference plus memset length and memcpy length
1355 as the new memcpy length, if it is too big, bail out. */
1356 src_len = tree_to_uhwi (diff);
1357 src_len += tree_to_uhwi (len2);
1358 if (src_len < tree_to_uhwi (len1))
1359 src_len = tree_to_uhwi (len1);
1360 if (src_len > 1024)
1361 break;
1362
1363 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1364 with bigger length will return a different result. */
1365 if (lhs1 != NULL_TREE
1366 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1367 && (TREE_CODE (lhs1) != SSA_NAME
1368 || !single_imm_use (lhs1, &use_p, &use_stmt)
1369 || use_stmt != stmt2))
1370 break;
1371
1372 /* If anything reads memory in between memcpy and memset
1373 call, the modified memcpy call might change it. */
1374 vdef = gimple_vdef (stmt1);
1375 if (vdef != NULL
1376 && (!single_imm_use (vdef, &use_p, &use_stmt)
1377 || use_stmt != stmt2))
1378 break;
1379
1380 ptr1_align = get_pointer_alignment (ptr1);
1381 /* Construct the new source string literal. */
1382 src_buf = XALLOCAVEC (char, src_len + 1);
1383 if (callee1)
1384 memcpy (src_buf,
1385 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1386 tree_to_uhwi (len1));
1387 else
1388 src_buf[0] = tree_to_shwi (src1);
1389 memset (src_buf + tree_to_uhwi (diff),
1390 tree_to_shwi (val2), tree_to_uhwi (len2));
1391 src_buf[src_len] = '\0';
1392 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1393 handle embedded '\0's. */
1394 if (strlen (src_buf) != src_len)
1395 break;
1396 rtl_profile_for_bb (gimple_bb (stmt2));
1397 /* If the new memcpy wouldn't be emitted by storing the literal
1398 by pieces, this optimization might enlarge .rodata too much,
1399 as commonly used string literals couldn't be shared any
1400 longer. */
1401 if (!can_store_by_pieces (src_len,
1402 builtin_strncpy_read_str,
1403 src_buf, ptr1_align, false))
1404 break;
1405
1406 new_str_cst = build_string_literal (src_len, src_buf);
1407 if (callee1)
1408 {
1409 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1410 memset call. */
1411 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1412 gimple_call_set_lhs (stmt1, NULL_TREE);
1413 gimple_call_set_arg (stmt1, 1, new_str_cst);
1414 gimple_call_set_arg (stmt1, 2,
1415 build_int_cst (TREE_TYPE (len1), src_len));
1416 update_stmt (stmt1);
1417 unlink_stmt_vdef (stmt2);
1418 gsi_remove (gsi_p, true);
1419 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1420 release_defs (stmt2);
1421 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1422 {
1423 fwprop_invalidate_lattice (lhs1);
1424 release_ssa_name (lhs1);
1425 }
1426 return true;
1427 }
1428 else
1429 {
1430 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1431 assignment, remove STMT1 and change memset call into
1432 memcpy call. */
1433 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1434
1435 if (!is_gimple_val (ptr1))
1436 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1437 true, GSI_SAME_STMT);
1438 gimple_call_set_fndecl (stmt2,
1439 builtin_decl_explicit (BUILT_IN_MEMCPY));
1440 gimple_call_set_arg (stmt2, 0, ptr1);
1441 gimple_call_set_arg (stmt2, 1, new_str_cst);
1442 gimple_call_set_arg (stmt2, 2,
1443 build_int_cst (TREE_TYPE (len2), src_len));
1444 unlink_stmt_vdef (stmt1);
1445 gsi_remove (&gsi, true);
1446 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1447 release_defs (stmt1);
1448 update_stmt (stmt2);
1449 return false;
1450 }
1451 }
1452 break;
1453 default:
1454 break;
1455 }
1456 return false;
1457 }
1458
1459 /* Given an ssa_name in NAME, see if it was defined by an assignment
1460 and, if so, set CODE to the rhs code and ARG1 and ARG2 to the first
1461 and second operands on the rhs. */
1462
1463 static inline void
1464 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1465 {
1466 gimple *def;
1467 enum tree_code code1;
1468 tree arg11;
1469 tree arg21;
1470 tree arg31;
1471 enum gimple_rhs_class grhs_class;
1472
1473 code1 = TREE_CODE (name);
1474 arg11 = name;
1475 arg21 = NULL_TREE;
1476 grhs_class = get_gimple_rhs_class (code1);
1477
1478 if (code1 == SSA_NAME)
1479 {
1480 def = SSA_NAME_DEF_STMT (name);
1481
1482 if (def && is_gimple_assign (def)
1483 && can_propagate_from (def))
1484 {
1485 code1 = gimple_assign_rhs_code (def);
1486 arg11 = gimple_assign_rhs1 (def);
1487 arg21 = gimple_assign_rhs2 (def);
1488 arg31 = gimple_assign_rhs3 (def);
1489 }
1490 }
1491 else if (grhs_class == GIMPLE_TERNARY_RHS
1492 || grhs_class == GIMPLE_BINARY_RHS
1493 || grhs_class == GIMPLE_UNARY_RHS
1494 || grhs_class == GIMPLE_SINGLE_RHS)
1495 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1496
1497 *code = code1;
1498 *arg1 = arg11;
1499 if (arg2)
1500 *arg2 = arg21;
1501 /* Ignore arg3 currently. */
1502 }
1503
1504
1505 /* Recognize rotation patterns. Return true if a transformation
1506 was applied, otherwise return false.
1507
1508 We are looking for X with unsigned type T with bitsize B, OP being
1509 +, | or ^, some type T2 wider than T and
1510 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1511 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1512 (X << Y) OP (X >> (B - Y))
1513 (X << (int) Y) OP (X >> (int) (B - Y))
1514 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1515 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1516 (X << Y) | (X >> ((-Y) & (B - 1)))
1517 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1518 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1519 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1520
1521 and transform these into:
1522 X r<< CNT1
1523 X r<< Y
1524
1525 Note, in the patterns with T2 type, the type of OP operands
1526 might be even a signed type, but should have precision B. */
1527
1528 static bool
1529 simplify_rotate (gimple_stmt_iterator *gsi)
1530 {
1531 gimple *stmt = gsi_stmt (*gsi);
1532 tree arg[2], rtype, rotcnt = NULL_TREE;
1533 tree def_arg1[2], def_arg2[2];
1534 enum tree_code def_code[2];
1535 tree lhs;
1536 int i;
1537 bool swapped_p = false;
1538 gimple *g;
1539
1540 arg[0] = gimple_assign_rhs1 (stmt);
1541 arg[1] = gimple_assign_rhs2 (stmt);
1542 rtype = TREE_TYPE (arg[0]);
1543
1544 /* Only create rotates in complete modes. Other cases are not
1545 expanded properly. */
1546 if (!INTEGRAL_TYPE_P (rtype)
1547 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
1548 return false;
1549
1550 for (i = 0; i < 2; i++)
1551 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1552
1553 /* Look through narrowing conversions. */
1554 if (CONVERT_EXPR_CODE_P (def_code[0])
1555 && CONVERT_EXPR_CODE_P (def_code[1])
1556 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1557 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1558 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1559 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1560 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1561 && has_single_use (arg[0])
1562 && has_single_use (arg[1]))
1563 {
1564 for (i = 0; i < 2; i++)
1565 {
1566 arg[i] = def_arg1[i];
1567 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1568 }
1569 }
1570
1571 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1572 for (i = 0; i < 2; i++)
1573 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1574 return false;
1575 else if (!has_single_use (arg[i]))
1576 return false;
1577 if (def_code[0] == def_code[1])
1578 return false;
1579
1580 /* If we've looked through narrowing conversions before, look through
1581 widening conversions from unsigned type with the same precision
1582 as rtype here. */
1583 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1584 for (i = 0; i < 2; i++)
1585 {
1586 tree tem;
1587 enum tree_code code;
1588 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1589 if (!CONVERT_EXPR_CODE_P (code)
1590 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1591 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1592 return false;
1593 def_arg1[i] = tem;
1594 }
1595 /* Both shifts have to use the same first operand. */
1596 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
1597 return false;
1598 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1599 return false;
1600
1601 /* CNT1 + CNT2 == B case above. */
1602 if (tree_fits_uhwi_p (def_arg2[0])
1603 && tree_fits_uhwi_p (def_arg2[1])
1604 && tree_to_uhwi (def_arg2[0])
1605 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1606 rotcnt = def_arg2[0];
1607 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1608 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1609 return false;
1610 else
1611 {
1612 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1613 enum tree_code cdef_code[2];
1614 /* Look through conversion of the shift count argument.
1615 The C/C++ FE casts any shift count argument to integer_type_node.
1616 The only problem might be if the shift count type's maximum value
1617 is equal to or smaller than the number of bits in rtype. */
1618 for (i = 0; i < 2; i++)
1619 {
1620 def_arg2_alt[i] = def_arg2[i];
1621 defcodefor_name (def_arg2[i], &cdef_code[i],
1622 &cdef_arg1[i], &cdef_arg2[i]);
1623 if (CONVERT_EXPR_CODE_P (cdef_code[i])
1624 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1625 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1626 > floor_log2 (TYPE_PRECISION (rtype))
1627 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1628 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
1629 {
1630 def_arg2_alt[i] = cdef_arg1[i];
1631 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1632 &cdef_arg1[i], &cdef_arg2[i]);
1633 }
1634 }
1635 for (i = 0; i < 2; i++)
1636 /* Check for one shift count being Y and the other B - Y,
1637 with optional casts. */
1638 if (cdef_code[i] == MINUS_EXPR
1639 && tree_fits_shwi_p (cdef_arg1[i])
1640 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1641 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1642 {
1643 tree tem;
1644 enum tree_code code;
1645
1646 if (cdef_arg2[i] == def_arg2[1 - i]
1647 || cdef_arg2[i] == def_arg2_alt[1 - i])
1648 {
1649 rotcnt = cdef_arg2[i];
1650 break;
1651 }
1652 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1653 if (CONVERT_EXPR_CODE_P (code)
1654 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1655 && TYPE_PRECISION (TREE_TYPE (tem))
1656 > floor_log2 (TYPE_PRECISION (rtype))
1657 && TYPE_PRECISION (TREE_TYPE (tem))
1658 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1659 && (tem == def_arg2[1 - i]
1660 || tem == def_arg2_alt[1 - i]))
1661 {
1662 rotcnt = tem;
1663 break;
1664 }
1665 }
1666 /* The above sequence isn't safe for Y being 0,
1667 because then one of the shifts triggers undefined behavior.
1668 This alternative is safe even for rotation count of 0.
1669 One shift count is Y and the other (-Y) & (B - 1). */
1670 else if (cdef_code[i] == BIT_AND_EXPR
1671 && tree_fits_shwi_p (cdef_arg2[i])
1672 && tree_to_shwi (cdef_arg2[i])
1673 == TYPE_PRECISION (rtype) - 1
1674 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1675 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1676 {
1677 tree tem;
1678 enum tree_code code;
1679
1680 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1681 if (CONVERT_EXPR_CODE_P (code)
1682 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1683 && TYPE_PRECISION (TREE_TYPE (tem))
1684 > floor_log2 (TYPE_PRECISION (rtype))
1685 && TYPE_PRECISION (TREE_TYPE (tem))
1686 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
1687 defcodefor_name (tem, &code, &tem, NULL);
1688
1689 if (code == NEGATE_EXPR)
1690 {
1691 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1692 {
1693 rotcnt = tem;
1694 break;
1695 }
1696 defcodefor_name (tem, &code, &tem, NULL);
1697 if (CONVERT_EXPR_CODE_P (code)
1698 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1699 && TYPE_PRECISION (TREE_TYPE (tem))
1700 > floor_log2 (TYPE_PRECISION (rtype))
1701 && TYPE_PRECISION (TREE_TYPE (tem))
1702 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1703 && (tem == def_arg2[1 - i]
1704 || tem == def_arg2_alt[1 - i]))
1705 {
1706 rotcnt = tem;
1707 break;
1708 }
1709 }
1710 }
1711 if (rotcnt == NULL_TREE)
1712 return false;
1713 swapped_p = i != 1;
1714 }
1715
1716 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1717 TREE_TYPE (rotcnt)))
1718 {
1719 g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
1720 NOP_EXPR, rotcnt);
1721 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1722 rotcnt = gimple_assign_lhs (g);
1723 }
1724 lhs = gimple_assign_lhs (stmt);
1725 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1726 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
1727 g = gimple_build_assign (lhs,
1728 ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1729 ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
1730 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1731 {
1732 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1733 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
1734 }
1735 gsi_replace (gsi, g, false);
1736 return true;
1737 }
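/* As an illustration (a sketch, assuming a 32-bit unsigned int X so
   that B == 32), both recognized shift-pair forms

     r = (x << y) | (x >> (32 - y));
     r = (x << y) | (x >> (-y & 31));

   are rewritten into a single rotate

     r = x r<< y;   <-- LROTATE_EXPR

   the second form being the one that is well defined even for y == 0.  */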
1738
1739 /* Combine an element access with a shuffle.  Returns true if any
1740 changes were made, false otherwise.  */
1741
1742 static bool
1743 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1744 {
1745 gimple *stmt = gsi_stmt (*gsi);
1746 gimple *def_stmt;
1747 tree op, op0, op1, op2;
1748 tree elem_type;
1749 unsigned idx, n, size;
1750 enum tree_code code;
1751
1752 op = gimple_assign_rhs1 (stmt);
1753 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1754
1755 op0 = TREE_OPERAND (op, 0);
1756 if (TREE_CODE (op0) != SSA_NAME
1757 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1758 return false;
1759
1760 def_stmt = get_prop_source_stmt (op0, false, NULL);
1761 if (!def_stmt || !can_propagate_from (def_stmt))
1762 return false;
1763
1764 op1 = TREE_OPERAND (op, 1);
1765 op2 = TREE_OPERAND (op, 2);
1766 code = gimple_assign_rhs_code (def_stmt);
1767
1768 if (code == CONSTRUCTOR)
1769 {
1770 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1771 gimple_assign_rhs1 (def_stmt), op1, op2);
1772 if (!tem || !valid_gimple_rhs_p (tem))
1773 return false;
1774 gimple_assign_set_rhs_from_tree (gsi, tem);
1775 update_stmt (gsi_stmt (*gsi));
1776 return true;
1777 }
1778
1779 elem_type = TREE_TYPE (TREE_TYPE (op0));
1780 if (TREE_TYPE (op) != elem_type)
1781 return false;
1782
1783 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1784 n = TREE_INT_CST_LOW (op1) / size;
1785 if (n != 1)
1786 return false;
1787 idx = TREE_INT_CST_LOW (op2) / size;
1788
1789 if (code == VEC_PERM_EXPR)
1790 {
1791 tree p, m, index, tem;
1792 unsigned nelts;
1793 m = gimple_assign_rhs3 (def_stmt);
1794 if (TREE_CODE (m) != VECTOR_CST)
1795 return false;
1796 nelts = VECTOR_CST_NELTS (m);
1797 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1798 idx %= 2 * nelts;
1799 if (idx < nelts)
1800 {
1801 p = gimple_assign_rhs1 (def_stmt);
1802 }
1803 else
1804 {
1805 p = gimple_assign_rhs2 (def_stmt);
1806 idx -= nelts;
1807 }
1808 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
1809 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1810 unshare_expr (p), op1, index);
1811 gimple_assign_set_rhs1 (stmt, tem);
1812 fold_stmt (gsi);
1813 update_stmt (gsi_stmt (*gsi));
1814 return true;
1815 }
1816
1817 return false;
1818 }
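/* For example (a sketch), with four-lane vectors and a constant mask

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 0, 5, 2, 7 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 32>;

   lane 1 of the permutation comes from lane 1 of b_1 (mask index 5
   selects element 5 - 4 of the second input), so the extraction is
   redirected to the shuffle's source:

     x_3 = BIT_FIELD_REF <b_1, 32, 32>;  */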
1819
1820 /* Determine whether applying the two permutations (MASK1 then MASK2)
1821 gives back one of the inputs (1 for the first, 2 for the second, 0 otherwise).  */
1822
1823 static int
1824 is_combined_permutation_identity (tree mask1, tree mask2)
1825 {
1826 tree mask;
1827 unsigned int nelts, i, j;
1828 bool maybe_identity1 = true;
1829 bool maybe_identity2 = true;
1830
1831 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1832 && TREE_CODE (mask2) == VECTOR_CST);
1833 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1834 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
1835
1836 nelts = VECTOR_CST_NELTS (mask);
1837 for (i = 0; i < nelts; i++)
1838 {
1839 tree val = VECTOR_CST_ELT (mask, i);
1840 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1841 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1842 if (j == i)
1843 maybe_identity2 = false;
1844 else if (j == i + nelts)
1845 maybe_identity1 = false;
1846 else
1847 return 0;
1848 }
1849 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1850 }
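/* For instance, with four lanes, MASK1 = { 1, 0, 3, 2 } followed by
   MASK2 = { 1, 0, 3, 2 } selects element MASK1[MASK2[i]] == i for every
   lane, so the combination is the identity on the first input and the
   function returns 1.  */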
1851
1852 /* Combine a shuffle with its arguments.  Returns 1 if any changes were
1853 made, 2 if cfg-cleanup needs to run, and 0 otherwise.  */
1854
1855 static int
1856 simplify_permutation (gimple_stmt_iterator *gsi)
1857 {
1858 gimple *stmt = gsi_stmt (*gsi);
1859 gimple *def_stmt;
1860 tree op0, op1, op2, op3, arg0, arg1;
1861 enum tree_code code;
1862 bool single_use_op0 = false;
1863
1864 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1865
1866 op0 = gimple_assign_rhs1 (stmt);
1867 op1 = gimple_assign_rhs2 (stmt);
1868 op2 = gimple_assign_rhs3 (stmt);
1869
1870 if (TREE_CODE (op2) != VECTOR_CST)
1871 return 0;
1872
1873 if (TREE_CODE (op0) == VECTOR_CST)
1874 {
1875 code = VECTOR_CST;
1876 arg0 = op0;
1877 }
1878 else if (TREE_CODE (op0) == SSA_NAME)
1879 {
1880 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1881 if (!def_stmt || !can_propagate_from (def_stmt))
1882 return 0;
1883
1884 code = gimple_assign_rhs_code (def_stmt);
1885 arg0 = gimple_assign_rhs1 (def_stmt);
1886 }
1887 else
1888 return 0;
1889
1890 /* Two consecutive shuffles. */
1891 if (code == VEC_PERM_EXPR)
1892 {
1893 tree orig;
1894 int ident;
1895
1896 if (op0 != op1)
1897 return 0;
1898 op3 = gimple_assign_rhs3 (def_stmt);
1899 if (TREE_CODE (op3) != VECTOR_CST)
1900 return 0;
1901 ident = is_combined_permutation_identity (op3, op2);
1902 if (!ident)
1903 return 0;
1904 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1905 : gimple_assign_rhs2 (def_stmt);
1906 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1907 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1908 gimple_set_num_ops (stmt, 2);
1909 update_stmt (stmt);
1910 return remove_prop_source_from_use (op0) ? 2 : 1;
1911 }
1912
1913 /* Shuffle of a constructor. */
1914 else if (code == CONSTRUCTOR || code == VECTOR_CST)
1915 {
1916 tree opt;
1917 bool ret = false;
1918 if (op0 != op1)
1919 {
1920 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1921 return 0;
1922
1923 if (TREE_CODE (op1) == VECTOR_CST)
1924 arg1 = op1;
1925 else if (TREE_CODE (op1) == SSA_NAME)
1926 {
1927 enum tree_code code2;
1928
1929 gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1930 if (!def_stmt2 || !can_propagate_from (def_stmt2))
1931 return 0;
1932
1933 code2 = gimple_assign_rhs_code (def_stmt2);
1934 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1935 return 0;
1936 arg1 = gimple_assign_rhs1 (def_stmt2);
1937 }
1938 else
1939 return 0;
1940 }
1941 else
1942 {
1943 /* Already used twice in this statement. */
1944 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1945 return 0;
1946 arg1 = arg0;
1947 }
1948 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1949 if (!opt
1950 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1951 return 0;
1952 gimple_assign_set_rhs_from_tree (gsi, opt);
1953 update_stmt (gsi_stmt (*gsi));
1954 if (TREE_CODE (op0) == SSA_NAME)
1955 ret = remove_prop_source_from_use (op0);
1956 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1957 ret |= remove_prop_source_from_use (op1);
1958 return ret ? 2 : 1;
1959 }
1960
1961 return 0;
1962 }
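/* A sketch of the two cases handled above:

     v_2 = VEC_PERM_EXPR <a_1, a_1, { 1, 0, 3, 2 }>;
     v_3 = VEC_PERM_EXPR <v_2, v_2, { 1, 0, 3, 2 }>;   -->   v_3 = a_1;

     w_2 = VEC_PERM_EXPR <{ 1, 2, 3, 4 }, { 5, 6, 7, 8 }, { 0, 4, 1, 5 }>;
                                                       -->   w_2 = { 1, 5, 2, 6 };  */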
1963
1964 /* Recognize a vector CONSTRUCTOR whose elements all select from one source vector, turning it into a copy or a VEC_PERM_EXPR.  Returns true if any changes were made.  */
1965
1966 static bool
1967 simplify_vector_constructor (gimple_stmt_iterator *gsi)
1968 {
1969 gimple *stmt = gsi_stmt (*gsi);
1970 gimple *def_stmt;
1971 tree op, op2, orig, type, elem_type;
1972 unsigned elem_size, nelts, i;
1973 enum tree_code code;
1974 constructor_elt *elt;
1975 unsigned char *sel;
1976 bool maybe_ident;
1977
1978 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
1979
1980 op = gimple_assign_rhs1 (stmt);
1981 type = TREE_TYPE (op);
1982 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
1983
1984 nelts = TYPE_VECTOR_SUBPARTS (type);
1985 elem_type = TREE_TYPE (type);
1986 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1987
1988 sel = XALLOCAVEC (unsigned char, nelts);
1989 orig = NULL;
1990 maybe_ident = true;
1991 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
1992 {
1993 tree ref, op1;
1994
1995 if (i >= nelts)
1996 return false;
1997
1998 if (TREE_CODE (elt->value) != SSA_NAME)
1999 return false;
2000 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
2001 if (!def_stmt)
2002 return false;
2003 code = gimple_assign_rhs_code (def_stmt);
2004 if (code != BIT_FIELD_REF)
2005 return false;
2006 op1 = gimple_assign_rhs1 (def_stmt);
2007 ref = TREE_OPERAND (op1, 0);
2008 if (orig)
2009 {
2010 if (ref != orig)
2011 return false;
2012 }
2013 else
2014 {
2015 if (TREE_CODE (ref) != SSA_NAME)
2016 return false;
2017 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
2018 return false;
2019 orig = ref;
2020 }
2021 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2022 return false;
2023 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2024 if (sel[i] != i) maybe_ident = false;
2025 }
2026 if (i < nelts)
2027 return false;
2028
2029 if (maybe_ident)
2030 gimple_assign_set_rhs_from_tree (gsi, orig);
2031 else
2032 {
2033 tree mask_type, *mask_elts;
2034
2035 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2036 return false;
2037 mask_type
2038 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2039 nelts);
2040 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2041 || GET_MODE_SIZE (TYPE_MODE (mask_type))
2042 != GET_MODE_SIZE (TYPE_MODE (type)))
2043 return false;
2044 mask_elts = XALLOCAVEC (tree, nelts);
2045 for (i = 0; i < nelts; i++)
2046 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
2047 op2 = build_vector (mask_type, mask_elts);
2048 gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
2049 }
2050 update_stmt (gsi_stmt (*gsi));
2051 return true;
2052 }
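/* For example (a sketch), a four-lane constructor whose elements are
   all extracted from one vector v_1

     x_2 = BIT_FIELD_REF <v_1, 32, 32>;
     x_3 = BIT_FIELD_REF <v_1, 32, 0>;
     w_4 = {x_2, x_3, x_2, x_3};

   becomes, when the target supports the permutation,

     w_4 = VEC_PERM_EXPR <v_1, v_1, { 1, 0, 1, 0 }>;  */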
2053
2054
2055 /* Primitive "lattice" function for gimple_simplify. */
2056
2057 static tree
2058 fwprop_ssa_val (tree name)
2059 {
2060 /* First valueize NAME. */
2061 if (TREE_CODE (name) == SSA_NAME
2062 && SSA_NAME_VERSION (name) < lattice.length ())
2063 {
2064 tree val = lattice[SSA_NAME_VERSION (name)];
2065 if (val)
2066 name = val;
2067 }
2068 /* We continue matching along SSA use-def edges for SSA names
2069 that are not single-use. Currently there are no patterns
2070 that would cause any issues with that. */
2071 return name;
2072 }
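/* E.g. after seeing  tem_2 = a_1;  the lattice maps tem_2 to a_1 (see
   the lattice filling in the pass body below), so a later match on a
   use of tem_2 looks through the copy to a_1.  */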
2073
2074 /* Main entry point for the forward propagation and statement combine
2075 optimizer. */
2076
2077 namespace {
2078
2079 const pass_data pass_data_forwprop =
2080 {
2081 GIMPLE_PASS, /* type */
2082 "forwprop", /* name */
2083 OPTGROUP_NONE, /* optinfo_flags */
2084 TV_TREE_FORWPROP, /* tv_id */
2085 ( PROP_cfg | PROP_ssa ), /* properties_required */
2086 0, /* properties_provided */
2087 0, /* properties_destroyed */
2088 0, /* todo_flags_start */
2089 TODO_update_ssa, /* todo_flags_finish */
2090 };
2091
2092 class pass_forwprop : public gimple_opt_pass
2093 {
2094 public:
2095 pass_forwprop (gcc::context *ctxt)
2096 : gimple_opt_pass (pass_data_forwprop, ctxt)
2097 {}
2098
2099 /* opt_pass methods: */
2100 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2101 virtual bool gate (function *) { return flag_tree_forwprop; }
2102 virtual unsigned int execute (function *);
2103
2104 }; // class pass_forwprop
2105
2106 unsigned int
2107 pass_forwprop::execute (function *fun)
2108 {
2109 unsigned int todoflags = 0;
2110
2111 cfg_changed = false;
2112
2113 /* Combine stmts with the stmts defining their operands. Do that
2114 in an order that guarantees visiting SSA defs before SSA uses. */
2115 lattice.create (num_ssa_names);
2116 lattice.quick_grow_cleared (num_ssa_names);
2117 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2118 int postorder_num = inverted_post_order_compute (postorder);
2119 auto_vec<gimple *, 4> to_fixup;
2120 to_purge = BITMAP_ALLOC (NULL);
2121 for (int i = 0; i < postorder_num; ++i)
2122 {
2123 gimple_stmt_iterator gsi;
2124 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2125
2126 /* Apply forward propagation to all stmts in the basic-block.
2127 Note we update GSI within the loop as necessary. */
2128 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2129 {
2130 gimple *stmt = gsi_stmt (gsi);
2131 tree lhs, rhs;
2132 enum tree_code code;
2133
2134 if (!is_gimple_assign (stmt))
2135 {
2136 gsi_next (&gsi);
2137 continue;
2138 }
2139
2140 lhs = gimple_assign_lhs (stmt);
2141 rhs = gimple_assign_rhs1 (stmt);
2142 code = gimple_assign_rhs_code (stmt);
2143 if (TREE_CODE (lhs) != SSA_NAME
2144 || has_zero_uses (lhs))
2145 {
2146 gsi_next (&gsi);
2147 continue;
2148 }
2149
2150 /* If this statement sets an SSA_NAME to an address,
2151 try to propagate the address into the uses of the SSA_NAME. */
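/* E.g. for  ptr_1 = &a.b;  a dereference  *ptr_1  in a use statement
   can be rewritten as  a.b  directly (a sketch).  */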
2152 if (code == ADDR_EXPR
2153 /* Handle pointer conversions on invariant addresses
2154 as well, as this is valid gimple. */
2155 || (CONVERT_EXPR_CODE_P (code)
2156 && TREE_CODE (rhs) == ADDR_EXPR
2157 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2158 {
2159 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2160 if ((!base
2161 || !DECL_P (base)
2162 || decl_address_invariant_p (base))
2163 && !stmt_references_abnormal_ssa_name (stmt)
2164 && forward_propagate_addr_expr (lhs, rhs, true))
2165 {
2166 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2167 release_defs (stmt);
2168 gsi_remove (&gsi, true);
2169 }
2170 else
2171 gsi_next (&gsi);
2172 }
2173 else if (code == POINTER_PLUS_EXPR)
2174 {
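/* A sketch:  for  ptr_2 = ptr_1 p+ 4;  a use  *ptr_2  is rewritten
   via the equivalent address  &MEM[ptr_1 + 4].  */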
2175 tree off = gimple_assign_rhs2 (stmt);
2176 if (TREE_CODE (off) == INTEGER_CST
2177 && can_propagate_from (stmt)
2178 && !simple_iv_increment_p (stmt)
2179 /* ??? Better adjust the interface to that function
2180 instead of building new trees here. */
2181 && forward_propagate_addr_expr
2182 (lhs,
2183 build1_loc (gimple_location (stmt),
2184 ADDR_EXPR, TREE_TYPE (rhs),
2185 fold_build2 (MEM_REF,
2186 TREE_TYPE (TREE_TYPE (rhs)),
2187 rhs,
2188 fold_convert (ptr_type_node,
2189 off))), true))
2190 {
2191 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2192 release_defs (stmt);
2193 gsi_remove (&gsi, true);
2194 }
2195 else if (is_gimple_min_invariant (rhs))
2196 {
2197 /* Make sure to fold &a[0] + off_1 here. */
2198 fold_stmt_inplace (&gsi);
2199 update_stmt (stmt);
2200 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2201 gsi_next (&gsi);
2202 }
2203 else
2204 gsi_next (&gsi);
2205 }
2206 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
2207 && gimple_assign_load_p (stmt)
2208 && !gimple_has_volatile_ops (stmt)
2209 && (TREE_CODE (gimple_assign_rhs1 (stmt))
2210 != TARGET_MEM_REF)
2211 && !stmt_can_throw_internal (stmt))
2212 {
2213 /* Rewrite loads used only in real/imagpart extractions to
2214 component-wise loads. */
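/* E.g.  c_1 = mem;  r_2 = REALPART_EXPR <c_1>;  becomes
   r_2 = REALPART_EXPR <mem>;  so only the needed component is loaded
   (a sketch).  */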
2215 use_operand_p use_p;
2216 imm_use_iterator iter;
2217 bool rewrite = true;
2218 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2219 {
2220 gimple *use_stmt = USE_STMT (use_p);
2221 if (is_gimple_debug (use_stmt))
2222 continue;
2223 if (!is_gimple_assign (use_stmt)
2224 || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
2225 && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
2226 {
2227 rewrite = false;
2228 break;
2229 }
2230 }
2231 if (rewrite)
2232 {
2233 gimple *use_stmt;
2234 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2235 {
2236 if (is_gimple_debug (use_stmt))
2237 {
2238 if (gimple_debug_bind_p (use_stmt))
2239 {
2240 gimple_debug_bind_reset_value (use_stmt);
2241 update_stmt (use_stmt);
2242 }
2243 continue;
2244 }
2245
2246 tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
2247 TREE_TYPE (TREE_TYPE (rhs)),
2248 unshare_expr (rhs));
2249 gimple *new_stmt
2250 = gimple_build_assign (gimple_assign_lhs (use_stmt),
2251 new_rhs);
2252
2253 location_t loc = gimple_location (use_stmt);
2254 gimple_set_location (new_stmt, loc);
2255 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2256 unlink_stmt_vdef (use_stmt);
2257 gsi_remove (&gsi2, true);
2258
2259 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2260 }
2261
2262 release_defs (stmt);
2263 gsi_remove (&gsi, true);
2264 }
2265 else
2266 gsi_next (&gsi);
2267 }
2268 else if (code == COMPLEX_EXPR)
2269 {
2270 /* Rewrite stores of a single-use complex build expression
2271 to component-wise stores. */
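/* E.g.  c_1 = COMPLEX_EXPR <r_2, i_3>;  mem = c_1;  becomes
   REALPART_EXPR <mem> = r_2;  IMAGPART_EXPR <mem> = i_3;
   (a sketch).  */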
2272 use_operand_p use_p;
2273 gimple *use_stmt;
2274 if (single_imm_use (lhs, &use_p, &use_stmt)
2275 && gimple_store_p (use_stmt)
2276 && !gimple_has_volatile_ops (use_stmt)
2277 && is_gimple_assign (use_stmt)
2278 && (TREE_CODE (gimple_assign_lhs (use_stmt))
2279 != TARGET_MEM_REF))
2280 {
2281 tree use_lhs = gimple_assign_lhs (use_stmt);
2282 tree new_lhs = build1 (REALPART_EXPR,
2283 TREE_TYPE (TREE_TYPE (use_lhs)),
2284 unshare_expr (use_lhs));
2285 gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
2286 location_t loc = gimple_location (use_stmt);
2287 gimple_set_location (new_stmt, loc);
2288 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2289 gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
2290 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2291 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2292 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2293 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2294
2295 new_lhs = build1 (IMAGPART_EXPR,
2296 TREE_TYPE (TREE_TYPE (use_lhs)),
2297 unshare_expr (use_lhs));
2298 gimple_assign_set_lhs (use_stmt, new_lhs);
2299 gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
2300 update_stmt (use_stmt);
2301
2302 release_defs (stmt);
2303 gsi_remove (&gsi, true);
2304 }
2305 else
2306 gsi_next (&gsi);
2307 }
2308 else
2309 gsi_next (&gsi);
2310 }
2311
2312 /* Combine stmts with the stmts defining their operands.
2313 Note we update GSI within the loop as necessary. */
2314 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2315 {
2316 gimple *stmt = gsi_stmt (gsi);
2317 gimple *orig_stmt = stmt;
2318 bool changed = false;
2319 bool was_noreturn = (is_gimple_call (stmt)
2320 && gimple_call_noreturn_p (stmt));
2321
2322 /* Mark stmt as potentially needing revisiting. */
2323 gimple_set_plf (stmt, GF_PLF_1, false);
2324
2325 if (fold_stmt (&gsi, fwprop_ssa_val))
2326 {
2327 changed = true;
2328 stmt = gsi_stmt (gsi);
2329 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2330 bitmap_set_bit (to_purge, bb->index);
2331 if (!was_noreturn
2332 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2333 to_fixup.safe_push (stmt);
2334 /* Clean up the CFG if we simplified a condition to
2335 true or false. */
2336 if (gcond *cond = dyn_cast <gcond *> (stmt))
2337 if (gimple_cond_true_p (cond)
2338 || gimple_cond_false_p (cond))
2339 cfg_changed = true;
2340 update_stmt (stmt);
2341 }
2342
2343 switch (gimple_code (stmt))
2344 {
2345 case GIMPLE_ASSIGN:
2346 {
2347 tree rhs1 = gimple_assign_rhs1 (stmt);
2348 enum tree_code code = gimple_assign_rhs_code (stmt);
2349
2350 if (code == COND_EXPR
2351 || code == VEC_COND_EXPR)
2352 {
2353 /* In this case the entire COND_EXPR is in rhs1. */
2354 if (forward_propagate_into_cond (&gsi))
2355 {
2356 changed = true;
2357 stmt = gsi_stmt (gsi);
2358 }
2359 }
2360 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2361 {
2362 int did_something;
2363 did_something = forward_propagate_into_comparison (&gsi);
2364 if (did_something == 2)
2365 cfg_changed = true;
2366 changed = did_something != 0;
2367 }
2368 else if ((code == PLUS_EXPR
2369 || code == BIT_IOR_EXPR
2370 || code == BIT_XOR_EXPR)
2371 && simplify_rotate (&gsi))
2372 changed = true;
2373 else if (code == VEC_PERM_EXPR)
2374 {
2375 int did_something = simplify_permutation (&gsi);
2376 if (did_something == 2)
2377 cfg_changed = true;
2378 changed = did_something != 0;
2379 }
2380 else if (code == BIT_FIELD_REF)
2381 changed = simplify_bitfield_ref (&gsi);
2382 else if (code == CONSTRUCTOR
2383 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2384 changed = simplify_vector_constructor (&gsi);
2385 break;
2386 }
2387
2388 case GIMPLE_SWITCH:
2389 changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
2390 break;
2391
2392 case GIMPLE_COND:
2393 {
2394 int did_something
2395 = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
2396 if (did_something == 2)
2397 cfg_changed = true;
2398 changed = did_something != 0;
2399 break;
2400 }
2401
2402 case GIMPLE_CALL:
2403 {
2404 tree callee = gimple_call_fndecl (stmt);
2405 if (callee != NULL_TREE
2406 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2407 changed = simplify_builtin_call (&gsi, callee);
2408 break;
2409 }
2410
2411 default:;
2412 }
2413
2414 if (changed)
2415 {
2416 /* If the stmt changed then re-visit it and the statements
2417 inserted before it. */
2418 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2419 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2420 break;
2421 if (gsi_end_p (gsi))
2422 gsi = gsi_start_bb (bb);
2423 else
2424 gsi_next (&gsi);
2425 }
2426 else
2427 {
2428 /* Stmt no longer needs to be revisited. */
2429 gimple_set_plf (stmt, GF_PLF_1, true);
2430
2431 /* Fill up the lattice. */
2432 if (gimple_assign_single_p (stmt))
2433 {
2434 tree lhs = gimple_assign_lhs (stmt);
2435 tree rhs = gimple_assign_rhs1 (stmt);
2436 if (TREE_CODE (lhs) == SSA_NAME)
2437 {
2438 tree val = lhs;
2439 if (TREE_CODE (rhs) == SSA_NAME)
2440 val = fwprop_ssa_val (rhs);
2441 else if (is_gimple_min_invariant (rhs))
2442 val = rhs;
2443 fwprop_set_lattice_val (lhs, val);
2444 }
2445 }
2446
2447 gsi_next (&gsi);
2448 }
2449 }
2450 }
2451 free (postorder);
2452 lattice.release ();
2453
2454 /* Fix up stmts that became noreturn calls.  This may require splitting
2455 blocks and thus isn't possible during the stmt walk.  Do this
2456 in reverse order so we don't inadvertently remove a stmt we want to
2457 fix up by visiting a dominating now-noreturn call first.  */
2458 while (!to_fixup.is_empty ())
2459 {
2460 gimple *stmt = to_fixup.pop ();
2461 if (dump_file && (dump_flags & TDF_DETAILS))
2462 {
2463 fprintf (dump_file, "Fixing up noreturn call ");
2464 print_gimple_stmt (dump_file, stmt, 0, 0);
2465 fprintf (dump_file, "\n");
2466 }
2467 cfg_changed |= fixup_noreturn_call (stmt);
2468 }
2469
2470 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2471 BITMAP_FREE (to_purge);
2472
2473 if (cfg_changed)
2474 todoflags |= TODO_cleanup_cfg;
2475
2476 return todoflags;
2477 }
2478
2479 } // anon namespace
2480
2481 gimple_opt_pass *
2482 make_pass_forwprop (gcc::context *ctxt)
2483 {
2484 return new pass_forwprop (ctxt);
2485 }