/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean, or x is an
    integral type and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
        {
          lattice.reserve (num_ssa_names - lattice.length ());
          lattice.quick_grow_cleared (num_ssa_names);
        }
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is none.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If NAME is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If DEF_STMT is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}

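/* For illustration (hypothetical GIMPLE, not from this file): given

     b_2 = a_1 << 2;
     c_3 = b_2;
     d_4 = c_3 + 1;

   looking up the propagation source for c_3 skips the trivial copy
   c_3 = b_2 and returns the shift statement as the source to combine
   with the use in d_4.  */
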
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

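/* E.g. (hypothetical) a definition such as

     fp_2 = (void (*) (void)) some_fn_ptr_1;

   is rejected by the last check above: forwarding the conversion could
   bypass a target's required function-pointer canonicalization.  */
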
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_TERNARY_RHS:
      return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt),
                              gimple_assign_rhs3 (stmt));
    case GIMPLE_BINARY_RHS:
      return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));
    case GIMPLE_UNARY_RHS:
      return build1 (code, type, gimple_assign_rhs1 (stmt));
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);
    default:
      gcc_unreachable ();
    }
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE if there is no suitable simplified form.  If
   INVARIANT_ONLY is true, only is_gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand; that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          bool invariant_only_p = !single_use0_p;

          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

          /* Always combine comparisons or conversions from booleans.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && ((CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
                      == BOOLEAN_TYPE)
                  || TREE_CODE_CLASS (def_code) == tcc_comparison))
            invariant_only_p = false;

          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, invariant_only_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the comparison
   assignment at *GSI into that comparison, simplifying it if possible.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

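/* A hypothetical instance of the above: with

     t_2 = a_1 < b_3;
     r_4 = t_2 == 0;

   and t_2 used only in r_4, combining the comparison with its defining
   statement folds r_4 to a_1 >= b_3 and the definition of t_2 becomes
   dead.  */
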
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}

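/* The boolean canonicalization above, on a hypothetical block:

     if (b_1 == 0) goto L1; else goto L2;

   with _Bool b_1 is rewritten to

     if (b_1 != 0) goto L2; else goto L1;

   i.e. the condition becomes != 0 and the true/false edge flags are
   swapped, so no new statements are needed.  */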

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may still have been
   changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match,
         so we can remove a conversion, re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain, from here on we can only propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

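/* A worked instance of the last case above (hypothetical types and a
   4-byte int assumed for illustration): with int x[8] and

     ptr_1 = &x[2];
     ptr2_3 = ptr_1 p+ 8;

   the rhs of ptr2_3 is rewritten to &MEM[&x[2] + 8], which fold turns
   back into &x[4], recovering array indexing from the pointer
   arithmetic.  */
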
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells whether, in a recursive invocation, NAME was
   a single use in the parent invocation.  Pass true when calling this
   at top level.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (cfun, CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a GIMPLE_SWITCH for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}

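/* As an illustrative (hypothetical) example: with char c_1 and

     i_2 = (int) c_1;
     switch (i_2) <case 0 ... case 9 ...>

   all case values fit in the char type, so the switch index is
   replaced by c_1 directly and the widening cast can later become
   dead.  */
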
/* For pointers p1 and p2 return p2 - p1 if the
   difference is known and constant, otherwise return NULL_TREE.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              poly_int64 offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (maybe_ne (offset, 0))
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

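/* For instance (hypothetical), with char buf[16] and

     p1_2 = &buf[1];
     p2_3 = p1_2 p+ 4;

   both pointers walk back to the common expression p1_2 with
   accumulated unit offsets 0 and 4, so
   constant_pointer_difference (p1_2, p2_3) returns 4.  */
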
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   if the latter call can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple *use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1, NULL, NULL);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_replace (gsi_p, gimple_build_nop (), false);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                {
                  fwprop_invalidate_lattice (lhs1);
                  release_ssa_name (lhs1);
                }
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
              gimple_call_set_fndecl (stmt2, fndecl);
              gimple_call_set_fntype (as_a <gcall *> (stmt2),
                                      TREE_TYPE (fndecl));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}

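/* The second (assignment) form handled above, as a hypothetical
   example:

     MEM[(char *) p_1] = 65;
     memset (p_1 p+ 1, 32, 3);

   becomes memcpy (p_1, "A   ", 4) when that 4-byte literal can be
   stored by pieces (65 is 'A' and 32 is ' ' in ASCII, an assumption
   of this illustration).  */
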
/* Given an SSA_NAME in NAME, see if it was defined by an assignment and,
   if so, set CODE to the code of the rhs and ARG1 and ARG2 to its first
   and second operands.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
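  /* A ternary rhs cannot be represented by a single CODE/ARG1/ARG2
     triple; signal that to the caller.  */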
  if (arg31)
    *code = ERROR_MARK;
}


1497 /* Recognize rotation patterns. Return true if a transformation
1498 applied, otherwise return false.
1499
1500 We are looking for X with unsigned type T with bitsize B, OP being
1501 +, | or ^, some type T2 wider than T. For:
1502 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1503 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1504
1505 transform these into:
1506 X r<< CNT1
1507
1508 Or for:
1509 (X << Y) OP (X >> (B - Y))
1510 (X << (int) Y) OP (X >> (int) (B - Y))
1511 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1512 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1513 (X << Y) | (X >> ((-Y) & (B - 1)))
1514 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1515 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1516 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1517
1518 transform these into:
1519 X r<< Y
1520
1521 Or for:
1522 (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
1523 (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
1524 ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1525 ((T) ((T2) X << (int) (Y & (B - 1)))) \
1526 | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1527
1528 transform these into:
1529 X r<< (Y & (B - 1))
1530
1531 Note, in the patterns with T2 type, the type of OP operands
1532 might be even a signed type, but should have precision B.
1533 Expressions with & (B - 1) should be recognized only if B is
1534 a power of 2. */
1535
1536 static bool
1537 simplify_rotate (gimple_stmt_iterator *gsi)
1538 {
1539 gimple *stmt = gsi_stmt (*gsi);
1540 tree arg[2], rtype, rotcnt = NULL_TREE;
1541 tree def_arg1[2], def_arg2[2];
1542 enum tree_code def_code[2];
1543 tree lhs;
1544 int i;
1545 bool swapped_p = false;
1546 gimple *g;
1547
1548 arg[0] = gimple_assign_rhs1 (stmt);
1549 arg[1] = gimple_assign_rhs2 (stmt);
1550 rtype = TREE_TYPE (arg[0]);
1551
1552 /* Only create rotates in complete modes. Other cases are not
1553 expanded properly. */
1554 if (!INTEGRAL_TYPE_P (rtype)
1555 || !type_has_mode_precision_p (rtype))
1556 return false;
1557
1558 for (i = 0; i < 2; i++)
1559 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1560
1561 /* Look through narrowing conversions. */
1562 if (CONVERT_EXPR_CODE_P (def_code[0])
1563 && CONVERT_EXPR_CODE_P (def_code[1])
1564 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1565 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1566 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1567 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1568 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1569 && has_single_use (arg[0])
1570 && has_single_use (arg[1]))
1571 {
1572 for (i = 0; i < 2; i++)
1573 {
1574 arg[i] = def_arg1[i];
1575 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1576 }
1577 }
1578
1579 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1580 for (i = 0; i < 2; i++)
1581 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1582 return false;
1583 else if (!has_single_use (arg[i]))
1584 return false;
1585 if (def_code[0] == def_code[1])
1586 return false;
1587
1588 /* If we've looked through narrowing conversions before, look through
1589 widening conversions from unsigned type with the same precision
1590 as rtype here. */
1591 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1592 for (i = 0; i < 2; i++)
1593 {
1594 tree tem;
1595 enum tree_code code;
1596 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1597 if (!CONVERT_EXPR_CODE_P (code)
1598 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1599 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1600 return false;
1601 def_arg1[i] = tem;
1602 }
1603 /* Both shifts have to use the same first operand. */
1604 if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1605 || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1606 TREE_TYPE (def_arg1[1])))
1607 return false;
1608 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1609 return false;
1610
1611 /* CNT1 + CNT2 == B case above. */
1612 if (tree_fits_uhwi_p (def_arg2[0])
1613 && tree_fits_uhwi_p (def_arg2[1])
1614 && tree_to_uhwi (def_arg2[0])
1615 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1616 rotcnt = def_arg2[0];
1617 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1618 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1619 return false;
1620 else
1621 {
1622 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1623 enum tree_code cdef_code[2];
1624 /* Look through conversion of the shift count argument.
1625 The C/C++ FE cast any shift count argument to integer_type_node.
1626 The only problem might be if the shift count type maximum value
1627 is equal or smaller than number of bits in rtype. */
1628 for (i = 0; i < 2; i++)
1629 {
1630 def_arg2_alt[i] = def_arg2[i];
1631 defcodefor_name (def_arg2[i], &cdef_code[i],
1632 &cdef_arg1[i], &cdef_arg2[i]);
1633 if (CONVERT_EXPR_CODE_P (cdef_code[i])
1634 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1635 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1636 > floor_log2 (TYPE_PRECISION (rtype))
1637 && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
1638 {
1639 def_arg2_alt[i] = cdef_arg1[i];
1640 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1641 &cdef_arg1[i], &cdef_arg2[i]);
1642 }
1643 }
1644 for (i = 0; i < 2; i++)
1645 /* Check for one shift count being Y and the other B - Y,
1646 with optional casts. */
1647 if (cdef_code[i] == MINUS_EXPR
1648 && tree_fits_shwi_p (cdef_arg1[i])
1649 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1650 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1651 {
1652 tree tem;
1653 enum tree_code code;
1654
1655 if (cdef_arg2[i] == def_arg2[1 - i]
1656 || cdef_arg2[i] == def_arg2_alt[1 - i])
1657 {
1658 rotcnt = cdef_arg2[i];
1659 break;
1660 }
1661 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1662 if (CONVERT_EXPR_CODE_P (code)
1663 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1664 && TYPE_PRECISION (TREE_TYPE (tem))
1665 > floor_log2 (TYPE_PRECISION (rtype))
1666 && type_has_mode_precision_p (TREE_TYPE (tem))
1667 && (tem == def_arg2[1 - i]
1668 || tem == def_arg2_alt[1 - i]))
1669 {
1670 rotcnt = tem;
1671 break;
1672 }
1673 }
1674 /* The above sequence isn't safe for Y being 0,
1675 because then one of the shifts triggers undefined behavior.
1676 This alternative is safe even for rotation count of 0.
1677 One shift count is Y and the other (-Y) & (B - 1).
1678 Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1). */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && pow2p_hwi (TYPE_PRECISION (rtype))
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem)))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                tree tem2;
                defcodefor_name (tem, &code, &tem2, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
                    && TYPE_PRECISION (TREE_TYPE (tem2))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && type_has_mode_precision_p (TREE_TYPE (tem2)))
                  {
                    if (tem2 == def_arg2[1 - i]
                        || tem2 == def_arg2_alt[1 - i])
                      {
                        rotcnt = tem2;
                        break;
                      }
                  }
                else
                  tem2 = NULL_TREE;

                if (cdef_code[1 - i] == BIT_AND_EXPR
                    && tree_fits_shwi_p (cdef_arg2[1 - i])
                    && tree_to_shwi (cdef_arg2[1 - i])
                       == TYPE_PRECISION (rtype) - 1
                    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
                  {
                    if (tem == cdef_arg1[1 - i]
                        || tem2 == cdef_arg1[1 - i])
                      {
                        rotcnt = def_arg2[1 - i];
                        break;
                      }
                    tree tem3;
                    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
                    if (CONVERT_EXPR_CODE_P (code)
                        && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
                        && TYPE_PRECISION (TREE_TYPE (tem3))
                           > floor_log2 (TYPE_PRECISION (rtype))
                        && type_has_mode_precision_p (TREE_TYPE (tem3)))
                      {
                        if (tem == tem3 || tem2 == tem3)
                          {
                            rotcnt = def_arg2[1 - i];
                            break;
                          }
                      }
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
                               NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
                           ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                           ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}

/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */
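/* For instance (a sketch, with 4-element vectors of 32-bit elements
   and made-up SSA names):

     tem_1 = VEC_PERM_EXPR <a_2, b_3, { 2, 5, 0, 7 }>;
     x_4 = BIT_FIELD_REF <tem_1, 32, 64>;

   extracts element 2 of the permutation; mask element 2 selects
   element 0 of A, so this becomes

     x_4 = BIT_FIELD_REF <a_2, 32, 0>;  */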

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  if (maybe_ne (bit_field_size (op), size))
    return false;

  if (code == VEC_PERM_EXPR
      && constant_multiple_p (bit_field_offset (op), size, &idx))
    {
      tree p, m, tem;
      unsigned HOST_WIDE_INT nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST
          || !VECTOR_CST_NELTS (m).is_constant (&nelts))
        return false;
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        {
          p = gimple_assign_rhs1 (def_stmt);
        }
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, bitsize_int (idx * size));
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */
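/* E.g. with 4 elements, mask1 = { 1, 0, 3, 2 } followed by
   mask2 = { 1, 0, 3, 2 } selects element mask1[mask2[i]] == i for
   every lane, so the combination is the identity on the first input
   (a return value of 1 below).  */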

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned HOST_WIDE_INT nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    return 0;

  if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    return 0;
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}

/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */
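/* E.g. two consecutive shuffles

     t_1 = VEC_PERM_EXPR <a_2, b_3, mask1>;
     r_4 = VEC_PERM_EXPR <t_1, t_1, mask2>;

   collapse to a plain copy "r_4 = a_2;" when the composition of the
   two masks is the identity on the first input (a sketch with made-up
   SSA names; see is_combined_permutation_identity above).  */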

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}

/* Get the BIT_FIELD_REF definition of VAL, if any, looking through
   conversions with code CONV_CODE; if CONV_CODE is still ERROR_MARK,
   record the conversion code that was found in it instead.
   Return NULL_TREE if no such matching def was found.  */
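/* E.g. given (a sketch with made-up SSA names)

     _1 = BIT_FIELD_REF <v_2, 32, 0>;
     val_3 = (float) _1;

   this returns the BIT_FIELD_REF and sets CONV_CODE to FLOAT_EXPR,
   provided CONV_CODE was ERROR_MARK or already FLOAT_EXPR.  */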

static tree
get_bit_field_ref_def (tree val, enum tree_code &conv_code)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL_TREE;
  gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
  if (!def_stmt)
    return NULL_TREE;
  enum tree_code code = gimple_assign_rhs_code (def_stmt);
  if (code == FLOAT_EXPR
      || code == FIX_TRUNC_EXPR)
    {
      tree op1 = gimple_assign_rhs1 (def_stmt);
      if (conv_code == ERROR_MARK)
        {
          if (maybe_ne (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (val))),
                        GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))))
            return NULL_TREE;
          conv_code = code;
        }
      else if (conv_code != code)
        return NULL_TREE;
      if (TREE_CODE (op1) != SSA_NAME)
        return NULL_TREE;
      def_stmt = SSA_NAME_DEF_STMT (op1);
      if (! is_gimple_assign (def_stmt))
        return NULL_TREE;
      code = gimple_assign_rhs_code (def_stmt);
    }
  if (code != BIT_FIELD_REF)
    return NULL_TREE;
  return gimple_assign_rhs1 (def_stmt);
}

/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
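/* E.g. the constructor of BIT_FIELD_REF extracts (a sketch, with
   4-element vectors of 32-bit elements and made-up SSA names)

     _1 = BIT_FIELD_REF <a_2, 32, 32>;
     _3 = BIT_FIELD_REF <b_4, 32, 0>;
     r_5 = {_1, _3, _1, _3};

   is recognized as

     r_5 = VEC_PERM_EXPR <a_2, b_4, { 1, 4, 1, 4 }>;  */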

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree op, op2, orig[2], type, elem_type;
  unsigned elem_size, i;
  unsigned HOST_WIDE_INT nelts;
  enum tree_code conv_code;
  constructor_elt *elt;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    return false;
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  vec_perm_builder sel (nelts, nelts, 1);
  orig[0] = NULL;
  orig[1] = NULL;
  conv_code = ERROR_MARK;
  maybe_ident = true;
  tree one_constant = NULL_TREE;
  tree one_nonconstant = NULL_TREE;
  auto_vec<tree> constants;
  constants.safe_grow_cleared (nelts);
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;
      unsigned int elem;

      if (i >= nelts)
        return false;

      /* Look for elements extracted and possibly converted from
         another vector.  */
      op1 = get_bit_field_ref_def (elt->value, conv_code);
      if (op1
          && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
          && VECTOR_TYPE_P (TREE_TYPE (ref))
          && useless_type_conversion_p (TREE_TYPE (op1),
                                        TREE_TYPE (TREE_TYPE (ref)))
          && known_eq (bit_field_size (op1), elem_size)
          && constant_multiple_p (bit_field_offset (op1),
                                  elem_size, &elem))
        {
          unsigned int j;
          for (j = 0; j < 2; ++j)
            {
              if (!orig[j])
                {
                  if (j == 0
                      || useless_type_conversion_p (TREE_TYPE (orig[0]),
                                                    TREE_TYPE (ref)))
                    break;
                }
              else if (ref == orig[j])
                break;
            }
          /* Found a suitable vector element.  */
          if (j < 2)
            {
              orig[j] = ref;
              if (j)
                elem += nelts;
              if (elem != i)
                maybe_ident = false;
              sel.quick_push (elem);
              continue;
            }
          /* Else fallthru.  */
        }
      /* Handle elements not extracted from a vector.
          1. constants by permuting with a constant vector
          2. a unique non-constant element by permuting with a splat vector  */
      if (orig[1]
          && orig[1] != error_mark_node)
        return false;
      orig[1] = error_mark_node;
      if (CONSTANT_CLASS_P (elt->value))
        {
          if (one_nonconstant)
            return false;
          if (!one_constant)
            one_constant = elt->value;
          constants[i] = elt->value;
        }
      else
        {
          if (one_constant)
            return false;
          if (!one_nonconstant)
            one_nonconstant = elt->value;
          else if (!operand_equal_p (one_nonconstant, elt->value, 0))
            return false;
        }
      sel.quick_push (i + nelts);
      maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (! orig[0]
      || ! VECTOR_TYPE_P (TREE_TYPE (orig[0]))
      || maybe_ne (TYPE_VECTOR_SUBPARTS (type),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0]))))
    return false;

  tree tem;
  if (conv_code != ERROR_MARK
      && (! supportable_convert_operation (conv_code, type,
                                           TREE_TYPE (orig[0]),
                                           &tem, &conv_code)
          || conv_code == CALL_EXPR))
    return false;

  if (maybe_ident)
    {
      if (conv_code == ERROR_MARK)
        gimple_assign_set_rhs_from_tree (gsi, orig[0]);
      else
        gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
                                        NULL_TREE, NULL_TREE);
    }
  else
    {
      tree mask_type;

      vec_perm_indices indices (sel, orig[1] ? 2 : 1, nelts);
      if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
                       GET_MODE_SIZE (TYPE_MODE (type))))
        return false;
      op2 = vec_perm_indices_to_tree (mask_type, indices);
      bool convert_orig0 = false;
      if (!orig[1])
        orig[1] = orig[0];
      else if (orig[1] == error_mark_node
               && one_nonconstant)
        {
          gimple_seq seq = NULL;
          orig[1] = gimple_build_vector_from_val (&seq, UNKNOWN_LOCATION,
                                                  type, one_nonconstant);
          gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
          convert_orig0 = true;
        }
      else if (orig[1] == error_mark_node)
        {
          tree_vector_builder vec (type, nelts, 1);
          for (unsigned i = 0; i < nelts; ++i)
            if (constants[i])
              vec.quick_push (constants[i]);
            else
              /* ??? Push a don't-care value.  */
              vec.quick_push (one_constant);
          orig[1] = vec.build ();
          convert_orig0 = true;
        }
      if (conv_code == ERROR_MARK)
        gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig[0],
                                        orig[1], op2);
      else if (convert_orig0)
        {
          gimple *conv
            = gimple_build_assign (make_ssa_name (type), conv_code, orig[0]);
          orig[0] = gimple_assign_lhs (conv);
          gsi_insert_before (gsi, conv, GSI_SAME_STMT);
          gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR,
                                          orig[0], orig[1], op2);
        }
      else
        {
          gimple *perm
            = gimple_build_assign (make_ssa_name (TREE_TYPE (orig[0])),
                                   VEC_PERM_EXPR, orig[0], orig[1], op2);
          orig[0] = gimple_assign_lhs (perm);
          gsi_insert_before (gsi, perm, GSI_SAME_STMT);
          gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
                                          NULL_TREE, NULL_TREE);
        }
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}


/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
        name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop

unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
                                                         postorder, false);
  auto_vec<gimple *, 4> to_fixup;
  auto_vec<gimple *, 32> to_remove;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Record degenerate PHIs in the lattice.  */
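      /* E.g. a PHI like x_3 = PHI <y_1(2), y_1(3)>, whose arguments
         are all the same, makes x_3 equivalent to y_1 (a sketch with
         made-up SSA names and block indices).  */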
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
           gsi_next (&si))
        {
          gphi *phi = si.phi ();
          tree res = gimple_phi_result (phi);
          if (virtual_operand_p (res))
            continue;

          use_operand_p use_p;
          ssa_op_iter it;
          tree first = NULL_TREE;
          bool all_same = true;
          FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
            {
              tree use = USE_FROM_PTR (use_p);
              if (! first)
                first = use;
              else if (! operand_equal_p (first, use, 0))
                {
                  all_same = false;
                  break;
                }
            }
          if (all_same)
            {
              if (may_propagate_copy (res, first))
                to_remove.safe_push (phi);
              fwprop_set_lattice_val (res, first);
            }
        }

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple *stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs, true))
                {
                  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off))), true))
                {
                  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
                   && gimple_assign_load_p (stmt)
                   && !gimple_has_volatile_ops (stmt)
                   && (TREE_CODE (gimple_assign_rhs1 (stmt))
                       != TARGET_MEM_REF)
                   && !stmt_can_throw_internal (cfun, stmt))
            {
              /* Rewrite loads used only in real/imagpart extractions to
                 component-wise loads.  */
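              /* I.e. (a sketch)

                   c_1 = *p_2;
                   r_3 = REALPART_EXPR <c_1>;

                 becomes

                   r_3 = REALPART_EXPR <*p_2>;  */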
              use_operand_p use_p;
              imm_use_iterator iter;
              bool rewrite = true;
              FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
                {
                  gimple *use_stmt = USE_STMT (use_p);
                  if (is_gimple_debug (use_stmt))
                    continue;
                  if (!is_gimple_assign (use_stmt)
                      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
                          && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
                    {
                      rewrite = false;
                      break;
                    }
                }
              if (rewrite)
                {
                  gimple *use_stmt;
                  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
                    {
                      if (is_gimple_debug (use_stmt))
                        {
                          if (gimple_debug_bind_p (use_stmt))
                            {
                              gimple_debug_bind_reset_value (use_stmt);
                              update_stmt (use_stmt);
                            }
                          continue;
                        }

                      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
                                             TREE_TYPE (TREE_TYPE (rhs)),
                                             unshare_expr (rhs));
                      gimple *new_stmt
                        = gimple_build_assign (gimple_assign_lhs (use_stmt),
                                               new_rhs);

                      location_t loc = gimple_location (use_stmt);
                      gimple_set_location (new_stmt, loc);
                      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                      unlink_stmt_vdef (use_stmt);
                      gsi_remove (&gsi2, true);

                      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
                    }

                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
                   && TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
                   && gimple_assign_load_p (stmt)
                   && !gimple_has_volatile_ops (stmt)
                   && (TREE_CODE (gimple_assign_rhs1 (stmt))
                       != TARGET_MEM_REF)
                   && !stmt_can_throw_internal (cfun, stmt))
            {
              /* Rewrite loads used only in BIT_FIELD_REF extractions to
                 component-wise loads.  */
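              /* I.e. (a sketch, for a BLKmode vector load)

                   v_1 = *p_2;
                   x_3 = BIT_FIELD_REF <v_1, 32, 0>;

                 becomes

                   x_3 = BIT_FIELD_REF <*p_2, 32, 0>;  */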
              use_operand_p use_p;
              imm_use_iterator iter;
              bool rewrite = true;
              FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
                {
                  gimple *use_stmt = USE_STMT (use_p);
                  if (is_gimple_debug (use_stmt))
                    continue;
                  if (!is_gimple_assign (use_stmt)
                      || gimple_assign_rhs_code (use_stmt) != BIT_FIELD_REF)
                    {
                      rewrite = false;
                      break;
                    }
                }
              if (rewrite)
                {
                  gimple *use_stmt;
                  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
                    {
                      if (is_gimple_debug (use_stmt))
                        {
                          if (gimple_debug_bind_p (use_stmt))
                            {
                              gimple_debug_bind_reset_value (use_stmt);
                              update_stmt (use_stmt);
                            }
                          continue;
                        }

                      tree bfr = gimple_assign_rhs1 (use_stmt);
                      tree new_rhs = fold_build3 (BIT_FIELD_REF,
                                                  TREE_TYPE (bfr),
                                                  unshare_expr (rhs),
                                                  TREE_OPERAND (bfr, 1),
                                                  TREE_OPERAND (bfr, 2));
                      gimple *new_stmt
                        = gimple_build_assign (gimple_assign_lhs (use_stmt),
                                               new_rhs);

                      location_t loc = gimple_location (use_stmt);
                      gimple_set_location (new_stmt, loc);
                      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                      unlink_stmt_vdef (use_stmt);
                      gsi_remove (&gsi2, true);

                      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
                    }

                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }

          else if (code == COMPLEX_EXPR)
            {
              /* Rewrite stores of a single-use complex build expression
                 to component-wise stores.  */
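              /* I.e. (a sketch)

                   c_1 = COMPLEX_EXPR <r_2, i_3>;
                   *p_4 = c_1;

                 becomes

                   REALPART_EXPR <*p_4> = r_2;
                   IMAGPART_EXPR <*p_4> = i_3;  */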
              use_operand_p use_p;
              gimple *use_stmt;
              if (single_imm_use (lhs, &use_p, &use_stmt)
                  && gimple_store_p (use_stmt)
                  && !gimple_has_volatile_ops (use_stmt)
                  && is_gimple_assign (use_stmt)
                  && (TREE_CODE (gimple_assign_lhs (use_stmt))
                      != TARGET_MEM_REF))
                {
                  tree use_lhs = gimple_assign_lhs (use_stmt);
                  tree new_lhs = build1 (REALPART_EXPR,
                                         TREE_TYPE (TREE_TYPE (use_lhs)),
                                         unshare_expr (use_lhs));
                  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
                  location_t loc = gimple_location (use_stmt);
                  gimple_set_location (new_stmt, loc);
                  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
                  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
                  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
                  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

                  new_lhs = build1 (IMAGPART_EXPR,
                                    TREE_TYPE (TREE_TYPE (use_lhs)),
                                    unshare_expr (use_lhs));
                  gimple_assign_set_lhs (use_stmt, new_lhs);
                  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
                  update_stmt (use_stmt);

                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == CONSTRUCTOR
                   && VECTOR_TYPE_P (TREE_TYPE (rhs))
                   && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
                   && CONSTRUCTOR_NELTS (rhs) > 0
                   && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
                       || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
                           != BLKmode)))
            {
              /* Rewrite stores of a single-use vector constructor
                 to component-wise stores if the mode isn't supported.  */
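              /* I.e. (a sketch, with 32-bit elements)

                   v_1 = {a_2, b_3, a_2, b_3};
                   *p_4 = v_1;

                 becomes a series of element stores

                   BIT_FIELD_REF <*p_4, 32, 0> = a_2;
                   BIT_FIELD_REF <*p_4, 32, 32> = b_3;
                   and so on for the remaining elements.  */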
              use_operand_p use_p;
              gimple *use_stmt;
              if (single_imm_use (lhs, &use_p, &use_stmt)
                  && gimple_store_p (use_stmt)
                  && !gimple_has_volatile_ops (use_stmt)
                  && !stmt_can_throw_internal (cfun, use_stmt)
                  && is_gimple_assign (use_stmt)
                  && (TREE_CODE (gimple_assign_lhs (use_stmt))
                      != TARGET_MEM_REF))
                {
                  tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
                  unsigned HOST_WIDE_INT elt_w
                    = tree_to_uhwi (TYPE_SIZE (elt_t));
                  unsigned HOST_WIDE_INT n
                    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
                  for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
                    {
                      unsigned HOST_WIDE_INT ci = bi / elt_w;
                      tree new_rhs;
                      if (ci < CONSTRUCTOR_NELTS (rhs))
                        new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
                      else
                        new_rhs = build_zero_cst (elt_t);
                      tree use_lhs = gimple_assign_lhs (use_stmt);
                      tree new_lhs = build3 (BIT_FIELD_REF,
                                             elt_t,
                                             unshare_expr (use_lhs),
                                             bitsize_int (elt_w),
                                             bitsize_int (bi));
                      gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
                      location_t loc = gimple_location (use_stmt);
                      gimple_set_location (new_stmt, loc);
                      gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
                      gimple_set_vdef (new_stmt,
                                       make_ssa_name (gimple_vop (cfun)));
                      SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
                      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                      gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
                    }
                  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                  unlink_stmt_vdef (use_stmt);
                  release_defs (use_stmt);
                  gsi_remove (&gsi2, true);
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else
            gsi_next (&gsi);
        }

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          /* Substitute from our lattice.  We need to do so only once.  */
          bool substituted_p = false;
          use_operand_p usep;
          ssa_op_iter iter;
          FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
            {
              tree use = USE_FROM_PTR (usep);
              tree val = fwprop_ssa_val (use);
              if (val && val != use && may_propagate_copy (use, val))
                {
                  propagate_value (usep, val);
                  substituted_p = true;
                }
            }
          if (substituted_p
              && is_gimple_assign (stmt)
              && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));

          bool changed;
          do
            {
              gimple *orig_stmt = stmt = gsi_stmt (gsi);
              bool was_noreturn = (is_gimple_call (stmt)
                                   && gimple_call_noreturn_p (stmt));
              changed = false;

              if (fold_stmt (&gsi, fwprop_ssa_val))
                {
                  changed = true;
                  stmt = gsi_stmt (gsi);
                  /* Cleanup the CFG if we simplified a condition to
                     true or false.  */
                  if (gcond *cond = dyn_cast <gcond *> (stmt))
                    if (gimple_cond_true_p (cond)
                        || gimple_cond_false_p (cond))
                      cfg_changed = true;
                }

              if (changed || substituted_p)
                {
                  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                    bitmap_set_bit (to_purge, bb->index);
                  if (!was_noreturn
                      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
                    to_fixup.safe_push (stmt);
                  update_stmt (stmt);
                  substituted_p = false;
                }

              switch (gimple_code (stmt))
                {
                case GIMPLE_ASSIGN:
                  {
                    tree rhs1 = gimple_assign_rhs1 (stmt);
                    enum tree_code code = gimple_assign_rhs_code (stmt);

                    if (code == COND_EXPR
                        || code == VEC_COND_EXPR)
                      {
                        /* In this case the entire COND_EXPR is in rhs1.  */
                        if (forward_propagate_into_cond (&gsi))
                          {
                            changed = true;
                            stmt = gsi_stmt (gsi);
                          }
                      }
                    else if (TREE_CODE_CLASS (code) == tcc_comparison)
                      {
                        int did_something;
                        did_something = forward_propagate_into_comparison (&gsi);
                        if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
                          bitmap_set_bit (to_purge, bb->index);
                        if (did_something == 2)
                          cfg_changed = true;
                        changed = did_something != 0;
                      }
                    else if ((code == PLUS_EXPR
                              || code == BIT_IOR_EXPR
                              || code == BIT_XOR_EXPR)
                             && simplify_rotate (&gsi))
                      changed = true;
                    else if (code == VEC_PERM_EXPR)
                      {
                        int did_something = simplify_permutation (&gsi);
                        if (did_something == 2)
                          cfg_changed = true;
                        changed = did_something != 0;
                      }
                    else if (code == BIT_FIELD_REF)
                      changed = simplify_bitfield_ref (&gsi);
                    else if (code == CONSTRUCTOR
                             && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                      changed = simplify_vector_constructor (&gsi);
                    break;
                  }

                case GIMPLE_SWITCH:
                  changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
                  break;

                case GIMPLE_COND:
                  {
                    int did_something = forward_propagate_into_gimple_cond
                                          (as_a <gcond *> (stmt));
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                    break;
                  }

                case GIMPLE_CALL:
                  {
                    tree callee = gimple_call_fndecl (stmt);
                    if (callee != NULL_TREE
                        && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
                      changed = simplify_builtin_call (&gsi, callee);
                    break;
                  }

                default:;
                }

              if (changed)
                {
                  /* If the stmt changed then re-visit it and the statements
                     inserted before it.  */
                  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                    if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                      break;
                  if (gsi_end_p (gsi))
                    gsi = gsi_start_bb (bb);
                  else
                    gsi_next (&gsi);
                }
            }
          while (changed);

          /* Stmt no longer needs to be revisited.  */
          stmt = gsi_stmt (gsi);
          gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
          gimple_set_plf (stmt, GF_PLF_1, true);

          /* Fill up the lattice.  */
          if (gimple_assign_single_p (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  tree val = lhs;
                  if (TREE_CODE (rhs) == SSA_NAME)
                    val = fwprop_ssa_val (rhs);
                  else if (is_gimple_min_invariant (rhs))
                    val = rhs;
                  /* If we can propagate the lattice-value mark the
                     stmt for removal.  */
                  if (val != lhs
                      && may_propagate_copy (lhs, val))
                    to_remove.safe_push (stmt);
                  fwprop_set_lattice_val (lhs, val);
                }
            }
          else if (gimple_nop_p (stmt))
            to_remove.safe_push (stmt);
        }

      /* Substitute in destination PHI arguments.  */
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
        for (gphi_iterator gsi = gsi_start_phis (e->dest);
             !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gphi *phi = gsi.phi ();
            use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
            tree arg = USE_FROM_PTR (use_p);
            if (TREE_CODE (arg) != SSA_NAME
                || virtual_operand_p (arg))
              continue;
            tree val = fwprop_ssa_val (arg);
            if (val != arg
                && may_propagate_copy (arg, val))
              propagate_value (use_p, val);
          }
    }
  free (postorder);
  lattice.release ();

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      gimple *stmt = to_remove.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
        }
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}