/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
   integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
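
/* Illustrative source-level sketch (hypothetical code, not from GCC's
   testsuite): given

     int f (int a, int b)
     {
       int x = a < b;
       if (x)
         return 1;
       return 0;
     }

   the single-use temporary x is forwarded into its use, the GIMPLE
   condition becomes "if (a < b)" and the assignment to x becomes dead
   and can be removed.  */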

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
	{
	  lattice.reserve (num_ssa_names - lattice.length ());
	  lattice.quick_grow_cleared (num_ssa_names);
	}
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}
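
/* Illustrative sketch (hypothetical GIMPLE, not from a real dump):

     a_1 = x_5 + 1;
     b_2 = a_1;
     c_3 = b_2;

   get_prop_source_stmt (c_3, false, &p) walks through the trivial
   copies c_3 = b_2 and b_2 = a_1 and returns the defining statement
   a_1 = x_5 + 1 as the propagation source.  */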

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
	return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
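
/* Illustrative sketch (hypothetical GIMPLE): after t_3 was forwarded
   into its only use, the chain

     s_2 = a_1 + 1;
     t_3 = s_2;

   is deleted back-to-front by remove_prop_source_from_use (t_3), which
   follows the first RHS operand of each now-dead definition.  */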

/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_TERNARY_RHS:
      return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt),
			      gimple_assign_rhs3 (stmt));
    case GIMPLE_BINARY_RHS:
      return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));
    case GIMPLE_UNARY_RHS:
      return build1 (code, type, gimple_assign_rhs1 (stmt));
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);
    default:
      gcc_unreachable ();
    }
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}
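
/* Illustrative sketch (hypothetical GIMPLE): given

     t_2 = a_1 < b_3;
     r_4 = t_2 != 0;

   combining the comparison r_4 with the defining statement of t_2
   folds "(a_1 < b_3) != 0" to "a_1 < b_3", so the redundant test
   against zero disappears.  */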

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp
      && is_gimple_condexpr_for_cond (tmp))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
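
/* Illustrative sketch of the edge-swapping canonicalization above
   (hypothetical GIMPLE): "if (b_1 == 0) goto L1; else goto L2;" with
   _Bool b_1 becomes "if (b_1 != 0) goto L2; else goto L1;"; the
   condition is rewritten to != 0 and the true/false edge flags are
   exchanged, so no new statements are needed.  */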


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       TREE_TYPE (cond),
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       def_code,
			       TREE_TYPE (cond),
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp);
	  fprintf (dump_file, "'\n");
	}

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
				  : integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
	 so we can remove a conversion re-materialize the address here
	 and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
	 to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  poly_offset_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
			 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
			  TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  poly_offset_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
			 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
			  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
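
/* Illustrative source-level sketch (hypothetical, not from a real
   testcase): given

     int x[8];
     int *p = &x[0];
     int *q = p + 2;

   the POINTER_PLUS_EXPR with the constant offset (8 bytes for 4-byte
   int) is rewritten so that q is defined directly as &x[2], recovering
   the array reference from pointer arithmetic.  */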

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
					      single_use_p);
      /* If the use has moved to a different statement adjust
	 the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
	{
	  update_stmt (use_stmt);
	  use_stmt = gsi_stmt (gsi);
	}
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  labels.quick_push (elt);
	  len = 1;
	}

      for (i = 0; i < labels.length (); i++)
	gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (cfun, CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
	{
	  tree def = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (def) != SSA_NAME)
	    return false;

	  /* If we have an extension or sign-change that preserves the
	     values we check against then we can copy the source value into
	     the switch.  */
	  tree ti = TREE_TYPE (def);
	  if (INTEGRAL_TYPE_P (ti)
	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
	    {
	      size_t n = gimple_switch_num_labels (stmt);
	      tree min = NULL_TREE, max = NULL_TREE;
	      if (n > 1)
		{
		  min = CASE_LOW (gimple_switch_label (stmt, 1));
		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
		  else
		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
		}
	      if ((!min || int_fits_type_p (min, ti))
		  && (!max || int_fits_type_p (max, ti)))
		{
		  gimple_switch_set_index (stmt, def);
		  simplify_gimple_switch_label_vec (stmt, ti);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}
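
/* Illustrative source-level sketch (hypothetical): for

     void g (unsigned char c)
     {
       switch ((int) c)
	 {
	 case 1: ...
	 case 200: ...
	 }
     }

   all case values fit in unsigned char, so the switch can be performed
   directly on c and the widening cast removed; any labels outside the
   narrower type's range would be dropped by
   simplify_gimple_switch_label_vec.  */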

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      poly_int64 offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (maybe_ne (offset, 0))
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    wide_int_to_tree (sizetype,
						      mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
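
/* Illustrative worked example (hypothetical): with

     char buf[16];
     p1 = &buf[2];
     p2 = p1 p+ 6;

   both pointers peel back to the common base buf with accumulated
   offsets 2 and 8, so constant_pointer_difference (p1, p2) returns
   size_int (6).  */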

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple *use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!tree_fits_shwi_p (val2)
	      || !tree_fits_uhwi_p (len2)
	      || compare_tree_int (len2, 1024) == 1)
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!tree_fits_uhwi_p (len1))
		break;
	      str1 = string_constant (src1, &off1, NULL, NULL);
	      if (str1 == NULL_TREE)
		break;
	      if (!tree_fits_uhwi_p (off1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_to_uhwi (off1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !tree_fits_shwi_p (src1))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out.  */
	  if (diff == NULL
	      || !tree_fits_uhwi_p (diff)
	      || tree_int_cst_lt (len1, diff)
	      || compare_tree_int (diff, 1024) == 1)
	    break;

	  /* Use maximum of difference plus memset length and memcpy length
	     as the new memcpy length, if it is too big, bail out.  */
	  src_len = tree_to_uhwi (diff);
	  src_len += tree_to_uhwi (len2);
	  if (src_len < tree_to_uhwi (len1))
	    src_len = tree_to_uhwi (len1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
		    tree_to_uhwi (len1));
	  else
	    src_buf[0] = tree_to_shwi (src1);
	  memset (src_buf + tree_to_uhwi (diff),
		  tree_to_shwi (val2), tree_to_uhwi (len2));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_replace (gsi_p, gimple_build_nop (), false);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		{
		  fwprop_invalidate_lattice (lhs1);
		  release_ssa_name (lhs1);
		}
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
	      gimple_call_set_fndecl (stmt2, fndecl);
	      gimple_call_set_fntype (as_a <gcall *> (stmt2),
				      TREE_TYPE (fndecl));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}
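
/* Worked example of the length arithmetic above (hypothetical): for
   memcpy (p, "abcd", 4) followed by memset (p + 4, ' ', 3), DIFF is 4
   and LEN2 is 3, so SRC_LEN = max (4 + 3, 4) = 7 and the merged call
   is memcpy (p, "abcd   ", 7), emitted only when can_store_by_pieces
   says storing the 7-byte literal is cheap.  */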

/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
	  && can_propagate_from (def))
	{
	  code1 = gimple_assign_rhs_code (def);
	  arg11 = gimple_assign_rhs1 (def);
	  arg21 = gimple_assign_rhs2 (def);
	  arg31 = gimple_assign_rhs3 (def);
	}
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Callers cannot handle a ternary RHS; signal that with ERROR_MARK.  */
  if (arg31)
    *code = ERROR_MARK;
}


/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T.  For:
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B

   transform these into:
   X r<< CNT1

   Or for:
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< Y

   Or for:
   (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
   (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) (Y & (B - 1)))) \
    | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< (Y & (B - 1))

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.
   Expressions with & (B - 1) should be recognized only if B is
   a power of 2.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || !type_has_mode_precision_p (rtype))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
      || !types_compatible_p (TREE_TYPE (def_arg1[0]),
			      TREE_TYPE (def_arg1[1])))
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
	 The C/C++ FEs cast any shift count argument to integer_type_node.
	 The only problem might be if the shift count type maximum value
	 is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && tree_fits_shwi_p (cdef_arg1[i])
	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& type_has_mode_precision_p (TREE_TYPE (tem))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe for Y being 0,
	   because then one of the shifts triggers undefined behavior.
	   This alternative is safe even for rotation count of 0.
	   One shift count is Y and the other (-Y) & (B - 1).
	   Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && pow2p_hwi (TYPE_PRECISION (rtype))
		 && tree_fits_shwi_p (cdef_arg2[i])
		 && tree_to_shwi (cdef_arg2[i])
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& type_has_mode_precision_p (TREE_TYPE (tem)))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		tree tem2;
		defcodefor_name (tem, &code, &tem2, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
		    && TYPE_PRECISION (TREE_TYPE (tem2))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && type_has_mode_precision_p (TREE_TYPE (tem2)))
		  {
		    if (tem2 == def_arg2[1 - i]
			|| tem2 == def_arg2_alt[1 - i])
		      {
			rotcnt = tem2;
			break;
		      }
		  }
		else
		  tem2 = NULL_TREE;

		if (cdef_code[1 - i] == BIT_AND_EXPR
		    && tree_fits_shwi_p (cdef_arg2[1 - i])
		    && tree_to_shwi (cdef_arg2[1 - i])
		       == TYPE_PRECISION (rtype) - 1
		    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
		  {
		    if (tem == cdef_arg1[1 - i]
			|| tem2 == cdef_arg1[1 - i])
		      {
			rotcnt = def_arg2[1 - i];
			break;
		      }
		    tree tem3;
		    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
		    if (CONVERT_EXPR_CODE_P (code)
			&& INTEGRAL_TYPE_P (TREE_TYPE (tem3))
			&& TYPE_PRECISION (TREE_TYPE (tem3))
			   > floor_log2 (TYPE_PRECISION (rtype))
			&& type_has_mode_precision_p (TREE_TYPE (tem3)))
		      {
			if (tem == tem3 || tem2 == tem3)
			  {
			    rotcnt = def_arg2[1 - i];
			    break;
			  }
		      }
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
			       NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}
1780
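/* An illustrative sketch, not part of this pass: assuming a 32-bit
   unsigned int (B == 32), both source-level idioms below match the
   patterns described above and are rewritten to a single rotate:

     unsigned int rotl_unsafe (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> (32 - y));          // UB for y == 0
     }

     unsigned int rotl_safe (unsigned int x, unsigned int y)
     {
       return (x << (y & 31)) | (x >> (-y & 31));  // defined for y == 0
     }

   The first corresponds to the Y / B - Y shift counts, the second to
   the Y & (B - 1) / (-Y) & (B - 1) alternative that is safe even for
   a rotation count of 0.  */
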
1781 /* Combine an element access with a shuffle. Returns true if any
1782 changes were made, false otherwise. */
1783
1784 static bool
1785 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1786 {
1787 gimple *stmt = gsi_stmt (*gsi);
1788 gimple *def_stmt;
1789 tree op, op0, op1;
1790 tree elem_type;
1791 unsigned idx, size;
1792 enum tree_code code;
1793
1794 op = gimple_assign_rhs1 (stmt);
1795 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1796
1797 op0 = TREE_OPERAND (op, 0);
1798 if (TREE_CODE (op0) != SSA_NAME
1799 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1800 return false;
1801
1802 def_stmt = get_prop_source_stmt (op0, false, NULL);
1803 if (!def_stmt || !can_propagate_from (def_stmt))
1804 return false;
1805
1806 op1 = TREE_OPERAND (op, 1);
1807 code = gimple_assign_rhs_code (def_stmt);
1808 elem_type = TREE_TYPE (TREE_TYPE (op0));
1809 if (TREE_TYPE (op) != elem_type)
1810 return false;
1811
1812 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1813 if (maybe_ne (bit_field_size (op), size))
1814 return false;
1815
1816 if (code == VEC_PERM_EXPR
1817 && constant_multiple_p (bit_field_offset (op), size, &idx))
1818 {
1819 tree p, m, tem;
1820 unsigned HOST_WIDE_INT nelts;
1821 m = gimple_assign_rhs3 (def_stmt);
1822 if (TREE_CODE (m) != VECTOR_CST
1823 || !VECTOR_CST_NELTS (m).is_constant (&nelts))
1824 return false;
1825 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1826 idx %= 2 * nelts;
1827 if (idx < nelts)
1828 {
1829 p = gimple_assign_rhs1 (def_stmt);
1830 }
1831 else
1832 {
1833 p = gimple_assign_rhs2 (def_stmt);
1834 idx -= nelts;
1835 }
1836 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1837 unshare_expr (p), op1, bitsize_int (idx * size));
1838 gimple_assign_set_rhs1 (stmt, tem);
1839 fold_stmt (gsi);
1840 update_stmt (gsi_stmt (*gsi));
1841 return true;
1842 }
1843
1844 return false;
1845 }
1846
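/* An illustrative GIMPLE sketch (made-up SSA names; 4-element vectors
   of 32-bit elements): for

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 1, 6, 2, 5 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 32>;   // element 1 of v_2

   mask element 1 is 6, i.e. element 2 of the second input, so the
   extraction is redirected to read b_1 directly:

     x_3 = BIT_FIELD_REF <b_1, 32, 64>;  */
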
1847 /* Determine whether applying the two permutations (MASK1 then MASK2)
1848 gives back one of the inputs. */
1849
1850 static int
1851 is_combined_permutation_identity (tree mask1, tree mask2)
1852 {
1853 tree mask;
1854 unsigned HOST_WIDE_INT nelts, i, j;
1855 bool maybe_identity1 = true;
1856 bool maybe_identity2 = true;
1857
1858 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1859 && TREE_CODE (mask2) == VECTOR_CST);
1860 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1861 if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
1862 return 0;
1863
1864 if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
1865 return 0;
1866 for (i = 0; i < nelts; i++)
1867 {
1868 tree val = VECTOR_CST_ELT (mask, i);
1869 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1870 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1871 if (j == i)
1872 maybe_identity2 = false;
1873 else if (j == i + nelts)
1874 maybe_identity1 = false;
1875 else
1876 return 0;
1877 }
1878 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1879 }
1880
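/* An illustrative sketch with four elements: composing
   MASK1 = { 3, 2, 1, 0 } with MASK2 = { 3, 2, 1, 0 } yields the
   combined mask { 0, 1, 2, 3 }, where every element selects position
   i of the first input, so the function returns 1.  If instead every
   combined element were i + nelts, it would return 2 (the second
   input passes through); anything else returns 0.  */
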
1881 /* Combine a shuffle with its arguments. Returns 1 if any changes were
1882 made, 2 if cfg-cleanup needs to run, and 0 otherwise. */
1883
1884 static int
1885 simplify_permutation (gimple_stmt_iterator *gsi)
1886 {
1887 gimple *stmt = gsi_stmt (*gsi);
1888 gimple *def_stmt;
1889 tree op0, op1, op2, op3, arg0, arg1;
1890 enum tree_code code;
1891 bool single_use_op0 = false;
1892
1893 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1894
1895 op0 = gimple_assign_rhs1 (stmt);
1896 op1 = gimple_assign_rhs2 (stmt);
1897 op2 = gimple_assign_rhs3 (stmt);
1898
1899 if (TREE_CODE (op2) != VECTOR_CST)
1900 return 0;
1901
1902 if (TREE_CODE (op0) == VECTOR_CST)
1903 {
1904 code = VECTOR_CST;
1905 arg0 = op0;
1906 }
1907 else if (TREE_CODE (op0) == SSA_NAME)
1908 {
1909 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1910 if (!def_stmt || !can_propagate_from (def_stmt))
1911 return 0;
1912
1913 code = gimple_assign_rhs_code (def_stmt);
1914 arg0 = gimple_assign_rhs1 (def_stmt);
1915 }
1916 else
1917 return 0;
1918
1919 /* Two consecutive shuffles. */
1920 if (code == VEC_PERM_EXPR)
1921 {
1922 tree orig;
1923 int ident;
1924
1925 if (op0 != op1)
1926 return 0;
1927 op3 = gimple_assign_rhs3 (def_stmt);
1928 if (TREE_CODE (op3) != VECTOR_CST)
1929 return 0;
1930 ident = is_combined_permutation_identity (op3, op2);
1931 if (!ident)
1932 return 0;
1933 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1934 : gimple_assign_rhs2 (def_stmt);
1935 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1936 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1937 gimple_set_num_ops (stmt, 2);
1938 update_stmt (stmt);
1939 return remove_prop_source_from_use (op0) ? 2 : 1;
1940 }
1941
1942 /* Shuffle of a constructor. */
1943 else if (code == CONSTRUCTOR || code == VECTOR_CST)
1944 {
1945 tree opt;
1946 bool ret = false;
1947 if (op0 != op1)
1948 {
1949 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1950 return 0;
1951
1952 if (TREE_CODE (op1) == VECTOR_CST)
1953 arg1 = op1;
1954 else if (TREE_CODE (op1) == SSA_NAME)
1955 {
1956 enum tree_code code2;
1957
1958 gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1959 if (!def_stmt2 || !can_propagate_from (def_stmt2))
1960 return 0;
1961
1962 code2 = gimple_assign_rhs_code (def_stmt2);
1963 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1964 return 0;
1965 arg1 = gimple_assign_rhs1 (def_stmt2);
1966 }
1967 else
1968 return 0;
1969 }
1970 else
1971 {
1972 /* Already used twice in this statement. */
1973 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1974 return 0;
1975 arg1 = arg0;
1976 }
1977 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1978 if (!opt
1979 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1980 return 0;
1981 gimple_assign_set_rhs_from_tree (gsi, opt);
1982 update_stmt (gsi_stmt (*gsi));
1983 if (TREE_CODE (op0) == SSA_NAME)
1984 ret = remove_prop_source_from_use (op0);
1985 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1986 ret |= remove_prop_source_from_use (op1);
1987 return ret ? 2 : 1;
1988 }
1989
1990 return 0;
1991 }
1992
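/* An illustrative GIMPLE sketch of the two-shuffle case (made-up SSA
   names; four elements):

     t_2 = VEC_PERM_EXPR <a_1, a_1, { 3, 2, 1, 0 }>;
     x_3 = VEC_PERM_EXPR <t_2, t_2, { 3, 2, 1, 0 }>;

   The combined permutation is the identity on the first input, so the
   second statement is turned into a plain copy of a_1, and the
   intermediate shuffle is then removed if it has no other uses.  */
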
1993 /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
1994 conversions with code CONV_CODE, or record the code in CONV_CODE if
1995 it is still ERROR_MARK. Return NULL_TREE if no matching def is found. */
1996
1997 static tree
1998 get_bit_field_ref_def (tree val, enum tree_code &conv_code)
1999 {
2000 if (TREE_CODE (val) != SSA_NAME)
2001 return NULL_TREE;
2002 gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
2003 if (!def_stmt)
2004 return NULL_TREE;
2005 enum tree_code code = gimple_assign_rhs_code (def_stmt);
2006 if (code == FLOAT_EXPR
2007 || code == FIX_TRUNC_EXPR)
2008 {
2009 tree op1 = gimple_assign_rhs1 (def_stmt);
2010 if (conv_code == ERROR_MARK)
2011 {
2012 if (maybe_ne (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (val))),
2013 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))))
2014 return NULL_TREE;
2015 conv_code = code;
2016 }
2017 else if (conv_code != code)
2018 return NULL_TREE;
2019 if (TREE_CODE (op1) != SSA_NAME)
2020 return NULL_TREE;
2021 def_stmt = SSA_NAME_DEF_STMT (op1);
2022 if (! is_gimple_assign (def_stmt))
2023 return NULL_TREE;
2024 code = gimple_assign_rhs_code (def_stmt);
2025 }
2026 if (code != BIT_FIELD_REF)
2027 return NULL_TREE;
2028 return gimple_assign_rhs1 (def_stmt);
2029 }
2030
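/* An illustrative sketch (made-up SSA names): for

     _1 = BIT_FIELD_REF <v_2, 32, 0>;   // extract an int element
     _3 = (float) _1;                   // FLOAT_EXPR

   calling get_bit_field_ref_def on _3 with CONV_CODE == ERROR_MARK
   returns the BIT_FIELD_REF and records FLOAT_EXPR in CONV_CODE,
   assuming int and float have the same mode size; later calls for
   sibling elements must then agree on the conversion code.  */
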
2031 /* Recognize a VEC_PERM_EXPR. Returns true if any changes were made. */
2032
2033 static bool
2034 simplify_vector_constructor (gimple_stmt_iterator *gsi)
2035 {
2036 gimple *stmt = gsi_stmt (*gsi);
2037 tree op, orig[2], type, elem_type;
2038 unsigned elem_size, i;
2039 unsigned HOST_WIDE_INT nelts;
2040 unsigned HOST_WIDE_INT refnelts;
2041 enum tree_code conv_code;
2042 constructor_elt *elt;
2043 bool maybe_ident;
2044
2045 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
2046
2047 op = gimple_assign_rhs1 (stmt);
2048 type = TREE_TYPE (op);
2049 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
2050
2051 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
2052 return false;
2053 elem_type = TREE_TYPE (type);
2054 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2055
2056 orig[0] = NULL;
2057 orig[1] = NULL;
2058 conv_code = ERROR_MARK;
2059 maybe_ident = true;
2060 tree one_constant = NULL_TREE;
2061 tree one_nonconstant = NULL_TREE;
2062 auto_vec<tree> constants;
2063 constants.safe_grow_cleared (nelts);
2064 auto_vec<std::pair<unsigned, unsigned>, 64> elts;
2065 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2066 {
2067 tree ref, op1;
2068 unsigned int elem;
2069
2070 if (i >= nelts)
2071 return false;
2072
2073 /* Look for elements extracted and possibly converted from
2074 another vector. */
2075 op1 = get_bit_field_ref_def (elt->value, conv_code);
2076 if (op1
2077 && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
2078 && VECTOR_TYPE_P (TREE_TYPE (ref))
2079 && useless_type_conversion_p (TREE_TYPE (op1),
2080 TREE_TYPE (TREE_TYPE (ref)))
2081 && known_eq (bit_field_size (op1), elem_size)
2082 && constant_multiple_p (bit_field_offset (op1),
2083 elem_size, &elem)
2084 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
2085 {
2086 unsigned int j;
2087 for (j = 0; j < 2; ++j)
2088 {
2089 if (!orig[j])
2090 {
2091 if (j == 0
2092 || useless_type_conversion_p (TREE_TYPE (orig[0]),
2093 TREE_TYPE (ref)))
2094 break;
2095 }
2096 else if (ref == orig[j])
2097 break;
2098 }
2099 /* Found a suitable vector element. */
2100 if (j < 2)
2101 {
2102 orig[j] = ref;
2103 if (elem != i || j != 0)
2104 maybe_ident = false;
2105 elts.safe_push (std::make_pair (j, elem));
2106 continue;
2107 }
2108 /* Else fallthru. */
2109 }
2110 /* Handle elements not extracted from a vector:
2111 1. constants, by permuting with a constant vector;
2112 2. a unique non-constant element, by permuting with a splat vector. */
2113 if (orig[1]
2114 && orig[1] != error_mark_node)
2115 return false;
2116 orig[1] = error_mark_node;
2117 if (CONSTANT_CLASS_P (elt->value))
2118 {
2119 if (one_nonconstant)
2120 return false;
2121 if (!one_constant)
2122 one_constant = elt->value;
2123 constants[i] = elt->value;
2124 }
2125 else
2126 {
2127 if (one_constant)
2128 return false;
2129 if (!one_nonconstant)
2130 one_nonconstant = elt->value;
2131 else if (!operand_equal_p (one_nonconstant, elt->value, 0))
2132 return false;
2133 }
2134 elts.safe_push (std::make_pair (1, i));
2135 maybe_ident = false;
2136 }
2137 if (i < nelts)
2138 return false;
2139
2140 if (! orig[0]
2141 || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
2142 return false;
2143 refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
2144
2145 if (maybe_ident)
2146 {
2147 tree conv_src_type
2148 = (nelts != refnelts
2149 ? (conv_code != ERROR_MARK
2150 ? build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])), nelts)
2151 : type)
2152 : TREE_TYPE (orig[0]));
2153 tree tem;
2154 if (conv_code != ERROR_MARK
2155 && (!supportable_convert_operation (conv_code, type, conv_src_type,
2156 &tem, &conv_code)
2157 || conv_code == CALL_EXPR))
2158 return false;
2159 if (nelts != refnelts)
2160 {
2161 gassign *lowpart
2162 = gimple_build_assign (make_ssa_name (conv_src_type),
2163 build3 (BIT_FIELD_REF, conv_src_type,
2164 orig[0], TYPE_SIZE (conv_src_type),
2165 bitsize_zero_node));
2166 gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
2167 orig[0] = gimple_assign_lhs (lowpart);
2168 }
2169 if (conv_code == ERROR_MARK)
2170 gimple_assign_set_rhs_from_tree (gsi, orig[0]);
2171 else
2172 gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
2173 NULL_TREE, NULL_TREE);
2174 }
2175 else
2176 {
2177 tree mask_type, perm_type, conv_src_type;
2178 perm_type = TREE_TYPE (orig[0]);
2179 conv_src_type = (nelts == refnelts
2180 ? perm_type
2181 : build_vector_type (TREE_TYPE (perm_type), nelts));
2182 tree tem;
2183 if (conv_code != ERROR_MARK
2184 && (!supportable_convert_operation (conv_code, type, conv_src_type,
2185 &tem, &conv_code)
2186 || conv_code == CALL_EXPR))
2187 return false;
2188
2189 /* Now that we know the number of elements of the source, build the
2190 permute vector.
2191 ??? When the second vector has constant values we can shuffle
2192 it and its source indexes to make the permutation supported.
2193 For now it mimics a blend. */
2194 vec_perm_builder sel (refnelts, refnelts, 1);
2195 for (i = 0; i < elts.length (); ++i)
2196 sel.quick_push (elts[i].second + elts[i].first * refnelts);
2197 /* And fill the tail with "something"; those values are don't-cares,
2198 and ideally we'd allow VEC_PERM to have a smaller destination
2199 vector. */
2200 for (; i < refnelts; ++i)
2201 sel.quick_push (i - elts.length ());
2202 vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
2203 if (!can_vec_perm_const_p (TYPE_MODE (perm_type), indices))
2204 return false;
2205 mask_type
2206 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2207 refnelts);
2208 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2209 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
2210 GET_MODE_SIZE (TYPE_MODE (perm_type))))
2211 return false;
2212 tree op2 = vec_perm_indices_to_tree (mask_type, indices);
2213 bool converted_orig1 = false;
2214 gimple_seq stmts = NULL;
2215 if (!orig[1])
2216 orig[1] = orig[0];
2217 else if (orig[1] == error_mark_node
2218 && one_nonconstant)
2219 {
2220 orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
2221 type, one_nonconstant);
2222 /* ??? We could check whether it is safe to convert to the
2223 original element type. */
2224 converted_orig1 = conv_code != ERROR_MARK;
2225 }
2226 else if (orig[1] == error_mark_node)
2227 {
2228 tree_vector_builder vec (type, nelts, 1);
2229 for (unsigned i = 0; i < nelts; ++i)
2230 if (constants[i])
2231 vec.quick_push (constants[i]);
2232 else
2233 /* ??? Push a don't-care value. */
2234 vec.quick_push (one_constant);
2235 orig[1] = vec.build ();
2236 /* ??? See if we can convert the vector to the original type. */
2237 converted_orig1 = conv_code != ERROR_MARK;
2238 }
2239 tree blend_op2 = NULL_TREE;
2240 if (converted_orig1)
2241 {
2242 /* Make sure we can do a blend in the target type. */
2243 vec_perm_builder sel (nelts, nelts, 1);
2244 for (i = 0; i < elts.length (); ++i)
2245 sel.quick_push (elts[i].first
2246 ? elts[i].second + nelts : i);
2247 vec_perm_indices indices (sel, 2, nelts);
2248 if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
2249 return false;
2250 mask_type
2251 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2252 nelts);
2253 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2254 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
2255 GET_MODE_SIZE (TYPE_MODE (type))))
2256 return false;
2257 blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
2258 }
2259 tree orig1_for_perm
2260 = converted_orig1 ? build_zero_cst (perm_type) : orig[1];
2261 tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
2262 orig[0], orig1_for_perm, op2);
2263 if (nelts != refnelts)
2264 res = gimple_build (&stmts, BIT_FIELD_REF,
2265 conv_code != ERROR_MARK ? conv_src_type : type,
2266 res, TYPE_SIZE (type), bitsize_zero_node);
2267 if (conv_code != ERROR_MARK)
2268 res = gimple_build (&stmts, conv_code, type, res);
2269 /* Blend in the actual constant. */
2270 if (converted_orig1)
2271 res = gimple_build (&stmts, VEC_PERM_EXPR, type,
2272 res, orig[1], blend_op2);
2273 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2274 gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
2275 }
2276 update_stmt (gsi_stmt (*gsi));
2277 return true;
2278 }
2279
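/* An illustrative GIMPLE sketch (made-up SSA names; V4SI vectors):
   the constructor

     _1 = BIT_FIELD_REF <a_2, 32, 96>;
     _3 = BIT_FIELD_REF <a_2, 32, 64>;
     _4 = BIT_FIELD_REF <a_2, 32, 32>;
     _5 = BIT_FIELD_REF <a_2, 32, 0>;
     x_6 = {_1, _3, _4, _5};

   gathers elements 3, 2, 1, 0 of a_2 and is recognized as

     x_6 = VEC_PERM_EXPR <a_2, a_2, { 3, 2, 1, 0 }>;

   whereas extracting elements 0..3 in order would take the MAYBE_IDENT
   path and collapse to a plain copy of a_2.  */
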
2280
2281 /* Primitive "lattice" function for gimple_simplify. */
2282
2283 static tree
2284 fwprop_ssa_val (tree name)
2285 {
2286 /* First valueize NAME. */
2287 if (TREE_CODE (name) == SSA_NAME
2288 && SSA_NAME_VERSION (name) < lattice.length ())
2289 {
2290 tree val = lattice[SSA_NAME_VERSION (name)];
2291 if (val)
2292 name = val;
2293 }
2294 /* We continue matching along SSA use-def edges for SSA names
2295 that are not single-use. Currently there are no patterns
2296 that would cause any issues with that. */
2297 return name;
2298 }
2299
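/* An illustrative sketch (made-up SSA names): after visiting

     y_2 = x_1;
     z_3 = y_2;

   the lattice maps both y_2 and z_3 to x_1, so fwprop_ssa_val (z_3)
   returns x_1 and later statements match against x_1 directly.  */
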
2300 /* Main entry point for the forward propagation and statement combine
2301 optimizer. */
2302
2303 namespace {
2304
2305 const pass_data pass_data_forwprop =
2306 {
2307 GIMPLE_PASS, /* type */
2308 "forwprop", /* name */
2309 OPTGROUP_NONE, /* optinfo_flags */
2310 TV_TREE_FORWPROP, /* tv_id */
2311 ( PROP_cfg | PROP_ssa ), /* properties_required */
2312 0, /* properties_provided */
2313 0, /* properties_destroyed */
2314 0, /* todo_flags_start */
2315 TODO_update_ssa, /* todo_flags_finish */
2316 };
2317
2318 class pass_forwprop : public gimple_opt_pass
2319 {
2320 public:
2321 pass_forwprop (gcc::context *ctxt)
2322 : gimple_opt_pass (pass_data_forwprop, ctxt)
2323 {}
2324
2325 /* opt_pass methods: */
2326 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2327 virtual bool gate (function *) { return flag_tree_forwprop; }
2328 virtual unsigned int execute (function *);
2329
2330 }; // class pass_forwprop
2331
2332 unsigned int
2333 pass_forwprop::execute (function *fun)
2334 {
2335 unsigned int todoflags = 0;
2336
2337 cfg_changed = false;
2338
2339 /* Combine stmts with the stmts defining their operands. Do that
2340 in an order that guarantees visiting SSA defs before SSA uses. */
2341 lattice.create (num_ssa_names);
2342 lattice.quick_grow_cleared (num_ssa_names);
2343 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2344 int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
2345 postorder, false);
2346 auto_vec<gimple *, 4> to_fixup;
2347 auto_vec<gimple *, 32> to_remove;
2348 to_purge = BITMAP_ALLOC (NULL);
2349 for (int i = 0; i < postorder_num; ++i)
2350 {
2351 gimple_stmt_iterator gsi;
2352 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2353
2354 /* Record degenerate PHIs in the lattice. */
2355 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
2356 gsi_next (&si))
2357 {
2358 gphi *phi = si.phi ();
2359 tree res = gimple_phi_result (phi);
2360 if (virtual_operand_p (res))
2361 continue;
2362
2363 use_operand_p use_p;
2364 ssa_op_iter it;
2365 tree first = NULL_TREE;
2366 bool all_same = true;
2367 FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
2368 {
2369 tree use = USE_FROM_PTR (use_p);
2370 if (! first)
2371 first = use;
2372 else if (! operand_equal_p (first, use, 0))
2373 {
2374 all_same = false;
2375 break;
2376 }
2377 }
2378 if (all_same)
2379 {
2380 if (may_propagate_copy (res, first))
2381 to_remove.safe_push (phi);
2382 fwprop_set_lattice_val (res, first);
2383 }
2384 }
2385
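/* An illustrative sketch (made-up SSA names): a degenerate PHI such as

     # x_4 = PHI <y_1(2), y_1(3)>

   has all arguments equal, so x_4 is given the lattice value y_1 and
   the PHI is queued for removal when the copy may be propagated.  */
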
2386 /* Apply forward propagation to all stmts in the basic-block.
2387 Note we update GSI within the loop as necessary. */
2388 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2389 {
2390 gimple *stmt = gsi_stmt (gsi);
2391 tree lhs, rhs;
2392 enum tree_code code;
2393
2394 if (!is_gimple_assign (stmt))
2395 {
2396 gsi_next (&gsi);
2397 continue;
2398 }
2399
2400 lhs = gimple_assign_lhs (stmt);
2401 rhs = gimple_assign_rhs1 (stmt);
2402 code = gimple_assign_rhs_code (stmt);
2403 if (TREE_CODE (lhs) != SSA_NAME
2404 || has_zero_uses (lhs))
2405 {
2406 gsi_next (&gsi);
2407 continue;
2408 }
2409
2410 /* If this statement sets an SSA_NAME to an address,
2411 try to propagate the address into the uses of the SSA_NAME. */
2412 if (code == ADDR_EXPR
2413 /* Handle pointer conversions on invariant addresses
2414 as well, as this is valid gimple. */
2415 || (CONVERT_EXPR_CODE_P (code)
2416 && TREE_CODE (rhs) == ADDR_EXPR
2417 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2418 {
2419 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2420 if ((!base
2421 || !DECL_P (base)
2422 || decl_address_invariant_p (base))
2423 && !stmt_references_abnormal_ssa_name (stmt)
2424 && forward_propagate_addr_expr (lhs, rhs, true))
2425 {
2426 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2427 release_defs (stmt);
2428 gsi_remove (&gsi, true);
2429 }
2430 else
2431 gsi_next (&gsi);
2432 }
2433 else if (code == POINTER_PLUS_EXPR)
2434 {
2435 tree off = gimple_assign_rhs2 (stmt);
2436 if (TREE_CODE (off) == INTEGER_CST
2437 && can_propagate_from (stmt)
2438 && !simple_iv_increment_p (stmt)
2439 /* ??? Better adjust the interface to that function
2440 instead of building new trees here. */
2441 && forward_propagate_addr_expr
2442 (lhs,
2443 build1_loc (gimple_location (stmt),
2444 ADDR_EXPR, TREE_TYPE (rhs),
2445 fold_build2 (MEM_REF,
2446 TREE_TYPE (TREE_TYPE (rhs)),
2447 rhs,
2448 fold_convert (ptr_type_node,
2449 off))), true))
2450 {
2451 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2452 release_defs (stmt);
2453 gsi_remove (&gsi, true);
2454 }
2455 else if (is_gimple_min_invariant (rhs))
2456 {
2457 /* Make sure to fold &a[0] + off_1 here. */
2458 fold_stmt_inplace (&gsi);
2459 update_stmt (stmt);
2460 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2461 gsi_next (&gsi);
2462 }
2463 else
2464 gsi_next (&gsi);
2465 }
2466 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
2467 && gimple_assign_load_p (stmt)
2468 && !gimple_has_volatile_ops (stmt)
2469 && (TREE_CODE (gimple_assign_rhs1 (stmt))
2470 != TARGET_MEM_REF)
2471 && !stmt_can_throw_internal (cfun, stmt))
2472 {
2473 /* Rewrite loads used only in real/imagpart extractions to
2474 component-wise loads. */
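/* An illustrative sketch (made-up SSA names):

     t_1 = *p_2;                    // complex load
     r_3 = REALPART_EXPR <t_1>;
     i_4 = IMAGPART_EXPR <t_1>;

   If every use of t_1 is such an extraction, this becomes

     r_3 = REALPART_EXPR <*p_2>;
     i_4 = IMAGPART_EXPR <*p_2>;  */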
2475 use_operand_p use_p;
2476 imm_use_iterator iter;
2477 bool rewrite = true;
2478 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2479 {
2480 gimple *use_stmt = USE_STMT (use_p);
2481 if (is_gimple_debug (use_stmt))
2482 continue;
2483 if (!is_gimple_assign (use_stmt)
2484 || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
2485 && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
2486 {
2487 rewrite = false;
2488 break;
2489 }
2490 }
2491 if (rewrite)
2492 {
2493 gimple *use_stmt;
2494 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2495 {
2496 if (is_gimple_debug (use_stmt))
2497 {
2498 if (gimple_debug_bind_p (use_stmt))
2499 {
2500 gimple_debug_bind_reset_value (use_stmt);
2501 update_stmt (use_stmt);
2502 }
2503 continue;
2504 }
2505
2506 tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
2507 TREE_TYPE (TREE_TYPE (rhs)),
2508 unshare_expr (rhs));
2509 gimple *new_stmt
2510 = gimple_build_assign (gimple_assign_lhs (use_stmt),
2511 new_rhs);
2512
2513 location_t loc = gimple_location (use_stmt);
2514 gimple_set_location (new_stmt, loc);
2515 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2516 unlink_stmt_vdef (use_stmt);
2517 gsi_remove (&gsi2, true);
2518
2519 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2520 }
2521
2522 release_defs (stmt);
2523 gsi_remove (&gsi, true);
2524 }
2525 else
2526 gsi_next (&gsi);
2527 }
2528 else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
2529 && TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
2530 && gimple_assign_load_p (stmt)
2531 && !gimple_has_volatile_ops (stmt)
2532 && (TREE_CODE (gimple_assign_rhs1 (stmt))
2533 != TARGET_MEM_REF)
2534 && !stmt_can_throw_internal (cfun, stmt))
2535 {
2536 /* Rewrite loads used only in BIT_FIELD_REF extractions to
2537 component-wise loads. */
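/* An illustrative sketch (made-up SSA names; a vector type whose mode
   is BLKmode):

     t_1 = *p_2;
     x_3 = BIT_FIELD_REF <t_1, 32, 0>;
     y_4 = BIT_FIELD_REF <t_1, 32, 32>;

   If every use of t_1 is such an extraction, each use becomes a
   narrower load from the original memory:

     x_3 = BIT_FIELD_REF <*p_2, 32, 0>;
     y_4 = BIT_FIELD_REF <*p_2, 32, 32>;  */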
2538 use_operand_p use_p;
2539 imm_use_iterator iter;
2540 bool rewrite = true;
2541 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2542 {
2543 gimple *use_stmt = USE_STMT (use_p);
2544 if (is_gimple_debug (use_stmt))
2545 continue;
2546 if (!is_gimple_assign (use_stmt)
2547 || gimple_assign_rhs_code (use_stmt) != BIT_FIELD_REF)
2548 {
2549 rewrite = false;
2550 break;
2551 }
2552 }
2553 if (rewrite)
2554 {
2555 gimple *use_stmt;
2556 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2557 {
2558 if (is_gimple_debug (use_stmt))
2559 {
2560 if (gimple_debug_bind_p (use_stmt))
2561 {
2562 gimple_debug_bind_reset_value (use_stmt);
2563 update_stmt (use_stmt);
2564 }
2565 continue;
2566 }
2567
2568 tree bfr = gimple_assign_rhs1 (use_stmt);
2569 tree new_rhs = fold_build3 (BIT_FIELD_REF,
2570 TREE_TYPE (bfr),
2571 unshare_expr (rhs),
2572 TREE_OPERAND (bfr, 1),
2573 TREE_OPERAND (bfr, 2));
2574 gimple *new_stmt
2575 = gimple_build_assign (gimple_assign_lhs (use_stmt),
2576 new_rhs);
2577
2578 location_t loc = gimple_location (use_stmt);
2579 gimple_set_location (new_stmt, loc);
2580 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2581 unlink_stmt_vdef (use_stmt);
2582 gsi_remove (&gsi2, true);
2583
2584 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2585 }
2586
2587 release_defs (stmt);
2588 gsi_remove (&gsi, true);
2589 }
2590 else
2591 gsi_next (&gsi);
2592 }
2593
2594 else if (code == COMPLEX_EXPR)
2595 {
2596 /* Rewrite stores of a single-use complex build expression
2597 to component-wise stores. */
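/* An illustrative sketch (made-up SSA names):

     t_1 = COMPLEX_EXPR <r_2, i_3>;
     *p_4 = t_1;

   becomes the two component stores

     REALPART_EXPR <*p_4> = r_2;
     IMAGPART_EXPR <*p_4> = i_3;  */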
2598 use_operand_p use_p;
2599 gimple *use_stmt;
2600 if (single_imm_use (lhs, &use_p, &use_stmt)
2601 && gimple_store_p (use_stmt)
2602 && !gimple_has_volatile_ops (use_stmt)
2603 && is_gimple_assign (use_stmt)
2604 && (TREE_CODE (gimple_assign_lhs (use_stmt))
2605 != TARGET_MEM_REF))
2606 {
2607 tree use_lhs = gimple_assign_lhs (use_stmt);
2608 tree new_lhs = build1 (REALPART_EXPR,
2609 TREE_TYPE (TREE_TYPE (use_lhs)),
2610 unshare_expr (use_lhs));
2611 gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
2612 location_t loc = gimple_location (use_stmt);
2613 gimple_set_location (new_stmt, loc);
2614 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2615 gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
2616 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2617 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2618 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2619 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2620
2621 new_lhs = build1 (IMAGPART_EXPR,
2622 TREE_TYPE (TREE_TYPE (use_lhs)),
2623 unshare_expr (use_lhs));
2624 gimple_assign_set_lhs (use_stmt, new_lhs);
2625 gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
2626 update_stmt (use_stmt);
2627
2628 release_defs (stmt);
2629 gsi_remove (&gsi, true);
2630 }
2631 else
2632 gsi_next (&gsi);
2633 }
2634 else if (code == CONSTRUCTOR
2635 && VECTOR_TYPE_P (TREE_TYPE (rhs))
2636 && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
2637 && CONSTRUCTOR_NELTS (rhs) > 0
2638 && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
2639 || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
2640 != BLKmode)))
2641 {
2642 /* Rewrite stores of a single-use vector constructor to
2643 component-wise stores if the mode isn't supported. */
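/* An illustrative sketch (made-up SSA names; a BLKmode vector of four
   32-bit elements):

     t_1 = {a_2, b_3, c_4, d_5};
     *p_6 = t_1;

   becomes four component stores (missing trailing constructor
   elements, if any, are stored as zero):

     BIT_FIELD_REF <*p_6, 32, 0>  = a_2;
     BIT_FIELD_REF <*p_6, 32, 32> = b_3;
     BIT_FIELD_REF <*p_6, 32, 64> = c_4;
     BIT_FIELD_REF <*p_6, 32, 96> = d_5;  */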
2644 use_operand_p use_p;
2645 gimple *use_stmt;
2646 if (single_imm_use (lhs, &use_p, &use_stmt)
2647 && gimple_store_p (use_stmt)
2648 && !gimple_has_volatile_ops (use_stmt)
2649 && !stmt_can_throw_internal (cfun, use_stmt)
2650 && is_gimple_assign (use_stmt)
2651 && (TREE_CODE (gimple_assign_lhs (use_stmt))
2652 != TARGET_MEM_REF))
2653 {
2654 tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
2655 unsigned HOST_WIDE_INT elt_w
2656 = tree_to_uhwi (TYPE_SIZE (elt_t));
2657 unsigned HOST_WIDE_INT n
2658 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
2659 for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
2660 {
2661 unsigned HOST_WIDE_INT ci = bi / elt_w;
2662 tree new_rhs;
2663 if (ci < CONSTRUCTOR_NELTS (rhs))
2664 new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
2665 else
2666 new_rhs = build_zero_cst (elt_t);
2667 tree use_lhs = gimple_assign_lhs (use_stmt);
2668 tree new_lhs = build3 (BIT_FIELD_REF,
2669 elt_t,
2670 unshare_expr (use_lhs),
2671 bitsize_int (elt_w),
2672 bitsize_int (bi));
2673 gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
2674 location_t loc = gimple_location (use_stmt);
2675 gimple_set_location (new_stmt, loc);
2676 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2677 gimple_set_vdef (new_stmt,
2678 make_ssa_name (gimple_vop (cfun)));
2679 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2680 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2681 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2682 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2683 }
2684 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2685 unlink_stmt_vdef (use_stmt);
2686 release_defs (use_stmt);
2687 gsi_remove (&gsi2, true);
2688 release_defs (stmt);
2689 gsi_remove (&gsi, true);
2690 }
2691 else
2692 gsi_next (&gsi);
2693 }
2694 else
2695 gsi_next (&gsi);
2696 }
2697
2698 /* Combine stmts with the stmts defining their operands.
2699 Note we update GSI within the loop as necessary. */
2700 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2701 {
2702 gimple *stmt = gsi_stmt (gsi);
2703
2704 /* Mark stmt as potentially needing revisiting. */
2705 gimple_set_plf (stmt, GF_PLF_1, false);
2706
2707 /* Substitute from our lattice. We need to do so only once. */
2708 bool substituted_p = false;
2709 use_operand_p usep;
2710 ssa_op_iter iter;
2711 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
2712 {
2713 tree use = USE_FROM_PTR (usep);
2714 tree val = fwprop_ssa_val (use);
2715 if (val && val != use && may_propagate_copy (use, val))
2716 {
2717 propagate_value (usep, val);
2718 substituted_p = true;
2719 }
2720 }
2721 if (substituted_p
2722 && is_gimple_assign (stmt)
2723 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2724 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
2725
2726 bool changed;
2727 do
2728 {
2729 gimple *orig_stmt = stmt = gsi_stmt (gsi);
2730 bool was_noreturn = (is_gimple_call (stmt)
2731 && gimple_call_noreturn_p (stmt));
2732 changed = false;
2733
2734 if (fold_stmt (&gsi, fwprop_ssa_val))
2735 {
2736 changed = true;
2737 stmt = gsi_stmt (gsi);
2738 /* Cleanup the CFG if we simplified a condition to
2739 true or false. */
2740 if (gcond *cond = dyn_cast <gcond *> (stmt))
2741 if (gimple_cond_true_p (cond)
2742 || gimple_cond_false_p (cond))
2743 cfg_changed = true;
2744 }
2745
2746 if (changed || substituted_p)
2747 {
2748 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2749 bitmap_set_bit (to_purge, bb->index);
2750 if (!was_noreturn
2751 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2752 to_fixup.safe_push (stmt);
2753 update_stmt (stmt);
2754 substituted_p = false;
2755 }
2756
2757 switch (gimple_code (stmt))
2758 {
2759 case GIMPLE_ASSIGN:
2760 {
2761 tree rhs1 = gimple_assign_rhs1 (stmt);
2762 enum tree_code code = gimple_assign_rhs_code (stmt);
2763
2764 if (code == COND_EXPR
2765 || code == VEC_COND_EXPR)
2766 {
2767 /* In this case the entire COND_EXPR is in rhs1. */
2768 if (forward_propagate_into_cond (&gsi))
2769 {
2770 changed = true;
2771 stmt = gsi_stmt (gsi);
2772 }
2773 }
2774 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2775 {
2776 int did_something;
2777 did_something = forward_propagate_into_comparison (&gsi);
2778 if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
2779 bitmap_set_bit (to_purge, bb->index);
2780 if (did_something == 2)
2781 cfg_changed = true;
2782 changed = did_something != 0;
2783 }
2784 else if ((code == PLUS_EXPR
2785 || code == BIT_IOR_EXPR
2786 || code == BIT_XOR_EXPR)
2787 && simplify_rotate (&gsi))
2788 changed = true;
2789 else if (code == VEC_PERM_EXPR)
2790 {
2791 int did_something = simplify_permutation (&gsi);
2792 if (did_something == 2)
2793 cfg_changed = true;
2794 changed = did_something != 0;
2795 }
2796 else if (code == BIT_FIELD_REF)
2797 changed = simplify_bitfield_ref (&gsi);
2798 else if (code == CONSTRUCTOR
2799 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2800 changed = simplify_vector_constructor (&gsi);
2801 break;
2802 }
2803
2804 case GIMPLE_SWITCH:
2805 changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
2806 break;
2807
2808 case GIMPLE_COND:
2809 {
2810 int did_something = forward_propagate_into_gimple_cond
2811 (as_a <gcond *> (stmt));
2812 if (did_something == 2)
2813 cfg_changed = true;
2814 changed = did_something != 0;
2815 break;
2816 }
2817
2818 case GIMPLE_CALL:
2819 {
2820 tree callee = gimple_call_fndecl (stmt);
2821 if (callee != NULL_TREE
2822 && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
2823 changed = simplify_builtin_call (&gsi, callee);
2824 break;
2825 }
2826
2827 default:;
2828 }
2829
2830 if (changed)
2831 {
2832 /* If the stmt changed then revisit it and the statements
2833 inserted before it. */
2834 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2835 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2836 break;
2837 if (gsi_end_p (gsi))
2838 gsi = gsi_start_bb (bb);
2839 else
2840 gsi_next (&gsi);
2841 }
2842 }
2843 while (changed);
2844
2845 /* Stmt no longer needs to be revisited. */
2846 stmt = gsi_stmt (gsi);
2847 gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
2848 gimple_set_plf (stmt, GF_PLF_1, true);
2849
2850 /* Fill up the lattice. */
2851 if (gimple_assign_single_p (stmt))
2852 {
2853 tree lhs = gimple_assign_lhs (stmt);
2854 tree rhs = gimple_assign_rhs1 (stmt);
2855 if (TREE_CODE (lhs) == SSA_NAME)
2856 {
2857 tree val = lhs;
2858 if (TREE_CODE (rhs) == SSA_NAME)
2859 val = fwprop_ssa_val (rhs);
2860 else if (is_gimple_min_invariant (rhs))
2861 val = rhs;
2862 /* If we can propagate the lattice value, mark the
2863 stmt for removal. */
2864 if (val != lhs
2865 && may_propagate_copy (lhs, val))
2866 to_remove.safe_push (stmt);
2867 fwprop_set_lattice_val (lhs, val);
2868 }
2869 }
2870 else if (gimple_nop_p (stmt))
2871 to_remove.safe_push (stmt);
2872 }
2873
2874 /* Substitute in destination PHI arguments. */
2875 edge_iterator ei;
2876 edge e;
2877 FOR_EACH_EDGE (e, ei, bb->succs)
2878 for (gphi_iterator gsi = gsi_start_phis (e->dest);
2879 !gsi_end_p (gsi); gsi_next (&gsi))
2880 {
2881 gphi *phi = gsi.phi ();
2882 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
2883 tree arg = USE_FROM_PTR (use_p);
2884 if (TREE_CODE (arg) != SSA_NAME
2885 || virtual_operand_p (arg))
2886 continue;
2887 tree val = fwprop_ssa_val (arg);
2888 if (val != arg
2889 && may_propagate_copy (arg, val))
2890 propagate_value (use_p, val);
2891 }
2892 }
2893 free (postorder);
2894 lattice.release ();
2895
2896 /* Remove stmts in reverse order to make debug stmt creation possible. */
2897 while (!to_remove.is_empty ())
2898 {
2899 gimple *stmt = to_remove.pop ();
2900 if (dump_file && (dump_flags & TDF_DETAILS))
2901 {
2902 fprintf (dump_file, "Removing dead stmt ");
2903 print_gimple_stmt (dump_file, stmt, 0);
2904 fprintf (dump_file, "\n");
2905 }
2906 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2907 if (gimple_code (stmt) == GIMPLE_PHI)
2908 remove_phi_node (&gsi, true);
2909 else
2910 {
2911 unlink_stmt_vdef (stmt);
2912 gsi_remove (&gsi, true);
2913 release_defs (stmt);
2914 }
2915 }
2916
2917 /* Fix up stmts that became noreturn calls. This may require splitting
2918 blocks and thus isn't possible during the walk. Do this
2919 in reverse order so we don't inadvertently remove a stmt we want to
2920 fix up by visiting a dominating now-noreturn call first. */
2921 while (!to_fixup.is_empty ())
2922 {
2923 gimple *stmt = to_fixup.pop ();
2924 if (dump_file && (dump_flags & TDF_DETAILS))
2925 {
2926 fprintf (dump_file, "Fixing up noreturn call ");
2927 print_gimple_stmt (dump_file, stmt, 0);
2928 fprintf (dump_file, "\n");
2929 }
2930 cfg_changed |= fixup_noreturn_call (stmt);
2931 }
2932
2933 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2934 BITMAP_FREE (to_purge);
2935
2936 if (cfg_changed)
2937 todoflags |= TODO_cleanup_cfg;
2938
2939 return todoflags;
2940 }
2941
2942 } // anon namespace
2943
2944 gimple_opt_pass *
2945 make_pass_forwprop (gcc::context *ctxt)
2946 {
2947 return new pass_forwprop (ctxt);
2948 }