]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-forwprop.c
Always combine comparisons or conversions from booleans.
[thirdparty/gcc.git] / gcc / tree-ssa-forwprop.c
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "tm_p.h"
27 #include "predict.h"
28 #include "vec.h"
29 #include "hashtab.h"
30 #include "hash-set.h"
31 #include "machmode.h"
32 #include "hard-reg-set.h"
33 #include "input.h"
34 #include "function.h"
35 #include "dominance.h"
36 #include "cfg.h"
37 #include "basic-block.h"
38 #include "gimple-pretty-print.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimple-expr.h"
44 #include "is-a.h"
45 #include "gimple.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-ssa.h"
50 #include "tree-cfg.h"
51 #include "tree-phinodes.h"
52 #include "ssa-iterators.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "expr.h"
56 #include "tree-dfa.h"
57 #include "tree-pass.h"
58 #include "langhooks.h"
59 #include "flags.h"
60 #include "diagnostic.h"
61 #include "expr.h"
62 #include "cfgloop.h"
63 #include "insn-codes.h"
64 #include "optabs.h"
65 #include "tree-ssa-propagate.h"
66 #include "tree-ssa-dom.h"
67 #include "builtins.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-into-ssa.h"
70 #include "cfganal.h"
71
72 /* This pass propagates the RHS of assignment statements into use
73 sites of the LHS of the assignment. It's basically a specialized
74 form of tree combination. It is hoped all of this can disappear
75 when we have a generalized tree combiner.
76
77 One class of common cases we handle is forward propagating a single use
78 variable into a COND_EXPR.
79
80 bb0:
81 x = a COND b;
82 if (x) goto ... else goto ...
83
84 Will be transformed into:
85
86 bb0:
87 if (a COND b) goto ... else goto ...
88
89 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
90
91 Or (assuming c1 and c2 are constants):
92
93 bb0:
94 x = a + c1;
95 if (x EQ/NEQ c2) goto ... else goto ...
96
97 Will be transformed into:
98
99 bb0:
100 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
101
102 Similarly for x = a - c1.
103
104 Or
105
106 bb0:
107 x = !a
108 if (x) goto ... else goto ...
109
110 Will be transformed into:
111
112 bb0:
113 if (a == 0) goto ... else goto ...
114
115 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
116 For these cases, we propagate A into all, possibly more than one,
117 COND_EXPRs that use X.
118
119 Or
120
121 bb0:
122 x = (typecast) a
123 if (x) goto ... else goto ...
124
125 Will be transformed into:
126
127 bb0:
128 if (a != 0) goto ... else goto ...
129
130 (Assuming a is an integral type and x is a boolean or x is an
131 integral and a is a boolean.)
132
133 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
134 For these cases, we propagate A into all, possibly more than one,
135 COND_EXPRs that use X.
136
137 In addition to eliminating the variable and the statement which assigns
138 a value to the variable, we may be able to later thread the jump without
139 adding insane complexity in the dominator optimizer.
140
141 Also note these transformations can cascade. We handle this by having
142 a worklist of COND_EXPR statements to examine. As we make a change to
143 a statement, we put it back on the worklist to examine on the next
144 iteration of the main loop.
145
146 A second class of propagation opportunities arises for ADDR_EXPR
147 nodes.
148
149 ptr = &x->y->z;
150 res = *ptr;
151
152 Will get turned into
153
154 res = x->y->z;
155
156 Or
157 ptr = (type1*)&type2var;
158 res = *ptr
159
160 Will get turned into (if type1 and type2 are the same size
161 and neither have volatile on them):
162 res = VIEW_CONVERT_EXPR<type1>(type2var)
163
164 Or
165
166 ptr = &x[0];
167 ptr2 = ptr + <constant>;
168
169 Will get turned into
170
171 ptr2 = &x[constant/elementsize];
172
173 Or
174
175 ptr = &x[0];
176 offset = index * element_size;
177 offset_p = (pointer) offset;
178 ptr2 = ptr + offset_p
179
180 Will get turned into:
181
182 ptr2 = &x[index];
183
184 Or
185 ssa = (int) decl
186 res = ssa & 1
187
188 Provided that decl has known alignment >= 2, will get turned into
189
190 res = 0
191
192 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
193 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
194 {NOT_EXPR,NEG_EXPR}.
195
196 This will (of course) be extended as other needs arise. */
197
198 static bool forward_propagate_addr_expr (tree, tree, bool);
199
200 /* Set to true if we delete dead edges during the optimization. */
201 static bool cfg_changed;
202
203 static tree rhs_to_tree (tree type, gimple stmt);
204
205 static bitmap to_purge;
206
207 /* Const-and-copy lattice. */
208 static vec<tree> lattice;
209
210 /* Set the lattice entry for NAME to VAL. */
211 static void
212 fwprop_set_lattice_val (tree name, tree val)
213 {
214 if (TREE_CODE (name) == SSA_NAME)
215 {
216 if (SSA_NAME_VERSION (name) >= lattice.length ())
217 {
218 lattice.reserve (num_ssa_names - lattice.length ());
219 lattice.quick_grow_cleared (num_ssa_names);
220 }
221 lattice[SSA_NAME_VERSION (name)] = val;
222 }
223 }
224
225 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
226 static void
227 fwprop_invalidate_lattice (tree name)
228 {
229 if (name
230 && TREE_CODE (name) == SSA_NAME
231 && SSA_NAME_VERSION (name) < lattice.length ())
232 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
233 }
234
235
236 /* Get the statement we can propagate from into NAME skipping
237 trivial copies. Returns the statement which defines the
238 propagation source or NULL_TREE if there is no such one.
239 If SINGLE_USE_ONLY is set considers only sources which have
240 a single use chain up to NAME. If SINGLE_USE_P is non-null,
241 it is set to whether the chain to NAME is a single use chain
242 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
243
244 static gimple
245 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
246 {
247 bool single_use = true;
248
249 do {
250 gimple def_stmt = SSA_NAME_DEF_STMT (name);
251
252 if (!has_single_use (name))
253 {
254 single_use = false;
255 if (single_use_only)
256 return NULL;
257 }
258
259 /* If name is defined by a PHI node or is the default def, bail out. */
260 if (!is_gimple_assign (def_stmt))
261 return NULL;
262
263 /* If def_stmt is a simple copy, continue looking. */
264 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
265 name = gimple_assign_rhs1 (def_stmt);
266 else
267 {
268 if (!single_use_only && single_use_p)
269 *single_use_p = single_use;
270
271 return def_stmt;
272 }
273 } while (1);
274 }
275
276 /* Checks if the destination ssa name in DEF_STMT can be used as
277 propagation source. Returns true if so, otherwise false. */
278
279 static bool
280 can_propagate_from (gimple def_stmt)
281 {
282 gcc_assert (is_gimple_assign (def_stmt));
283
284 /* If the rhs has side-effects we cannot propagate from it. */
285 if (gimple_has_volatile_ops (def_stmt))
286 return false;
287
288 /* If the rhs is a load we cannot propagate from it. */
289 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
290 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
291 return false;
292
293 /* Constants can be always propagated. */
294 if (gimple_assign_single_p (def_stmt)
295 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
296 return true;
297
298 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
299 if (stmt_references_abnormal_ssa_name (def_stmt))
300 return false;
301
302 /* If the definition is a conversion of a pointer to a function type,
303 then we can not apply optimizations as some targets require
304 function pointers to be canonicalized and in this case this
305 optimization could eliminate a necessary canonicalization. */
306 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
307 {
308 tree rhs = gimple_assign_rhs1 (def_stmt);
309 if (POINTER_TYPE_P (TREE_TYPE (rhs))
310 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
311 return false;
312 }
313
314 return true;
315 }
316
317 /* Remove a chain of dead statements starting at the definition of
318 NAME. The chain is linked via the first operand of the defining statements.
319 If NAME was replaced in its only use then this function can be used
320 to clean up dead stmts. The function handles already released SSA
321 names gracefully.
322 Returns true if cleanup-cfg has to run. */
323
324 static bool
325 remove_prop_source_from_use (tree name)
326 {
327 gimple_stmt_iterator gsi;
328 gimple stmt;
329 bool cfg_changed = false;
330
331 do {
332 basic_block bb;
333
334 if (SSA_NAME_IN_FREE_LIST (name)
335 || SSA_NAME_IS_DEFAULT_DEF (name)
336 || !has_zero_uses (name))
337 return cfg_changed;
338
339 stmt = SSA_NAME_DEF_STMT (name);
340 if (gimple_code (stmt) == GIMPLE_PHI
341 || gimple_has_side_effects (stmt))
342 return cfg_changed;
343
344 bb = gimple_bb (stmt);
345 gsi = gsi_for_stmt (stmt);
346 unlink_stmt_vdef (stmt);
347 if (gsi_remove (&gsi, true))
348 bitmap_set_bit (to_purge, bb->index);
349 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
350 release_defs (stmt);
351
352 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
353 } while (name && TREE_CODE (name) == SSA_NAME);
354
355 return cfg_changed;
356 }
357
358 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
359 converted to type TYPE.
360
361 This should disappear, but is needed so we can combine expressions and use
362 the fold() interfaces. Long term, we need to develop folding and combine
363 routines that deal with gimple exclusively . */
364
365 static tree
366 rhs_to_tree (tree type, gimple stmt)
367 {
368 location_t loc = gimple_location (stmt);
369 enum tree_code code = gimple_assign_rhs_code (stmt);
370 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
371 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
372 gimple_assign_rhs2 (stmt),
373 gimple_assign_rhs3 (stmt));
374 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
375 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
376 gimple_assign_rhs2 (stmt));
377 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
378 return build1 (code, type, gimple_assign_rhs1 (stmt));
379 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
380 return gimple_assign_rhs1 (stmt);
381 else
382 gcc_unreachable ();
383 }
384
385 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
386 the folded result in a form suitable for COND_EXPR_COND or
387 NULL_TREE, if there is no suitable simplified form. If
388 INVARIANT_ONLY is true only gimple_min_invariant results are
389 considered simplified. */
390
391 static tree
392 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
393 tree op0, tree op1, bool invariant_only)
394 {
395 tree t;
396
397 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
398
399 fold_defer_overflow_warnings ();
400 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
401 if (!t)
402 {
403 fold_undefer_overflow_warnings (false, NULL, 0);
404 return NULL_TREE;
405 }
406
407 /* Require that we got a boolean type out if we put one in. */
408 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
409
410 /* Canonicalize the combined condition for use in a COND_EXPR. */
411 t = canonicalize_cond_expr_cond (t);
412
413 /* Bail out if we required an invariant but didn't get one. */
414 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
415 {
416 fold_undefer_overflow_warnings (false, NULL, 0);
417 return NULL_TREE;
418 }
419
420 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
421
422 return t;
423 }
424
425 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
426 of its operand. Return a new comparison tree or NULL_TREE if there
427 were no simplifying combines. */
428
429 static tree
430 forward_propagate_into_comparison_1 (gimple stmt,
431 enum tree_code code, tree type,
432 tree op0, tree op1)
433 {
434 tree tmp = NULL_TREE;
435 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
436 bool single_use0_p = false, single_use1_p = false;
437
438 /* For comparisons use the first operand, that is likely to
439 simplify comparisons against constants. */
440 if (TREE_CODE (op0) == SSA_NAME)
441 {
442 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
443 if (def_stmt && can_propagate_from (def_stmt))
444 {
445 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
446 bool invariant_only_p = !single_use0_p;
447
448 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
449
450 /* Always combine comparisons or conversions from booleans. */
451 if (TREE_CODE (op1) == INTEGER_CST
452 && ((CONVERT_EXPR_CODE_P (def_code)
453 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
454 == BOOLEAN_TYPE)
455 || TREE_CODE_CLASS (def_code) == tcc_comparison))
456 invariant_only_p = false;
457
458 tmp = combine_cond_expr_cond (stmt, code, type,
459 rhs0, op1, invariant_only_p);
460 if (tmp)
461 return tmp;
462 }
463 }
464
465 /* If that wasn't successful, try the second operand. */
466 if (TREE_CODE (op1) == SSA_NAME)
467 {
468 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
469 if (def_stmt && can_propagate_from (def_stmt))
470 {
471 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
472 tmp = combine_cond_expr_cond (stmt, code, type,
473 op0, rhs1, !single_use1_p);
474 if (tmp)
475 return tmp;
476 }
477 }
478
479 /* If that wasn't successful either, try both operands. */
480 if (rhs0 != NULL_TREE
481 && rhs1 != NULL_TREE)
482 tmp = combine_cond_expr_cond (stmt, code, type,
483 rhs0, rhs1,
484 !(single_use0_p && single_use1_p));
485
486 return tmp;
487 }
488
489 /* Propagate from the ssa name definition statements of the assignment
490 from a comparison at *GSI into the conditional if that simplifies it.
491 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
492 otherwise returns 0. */
493
494 static int
495 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
496 {
497 gimple stmt = gsi_stmt (*gsi);
498 tree tmp;
499 bool cfg_changed = false;
500 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
501 tree rhs1 = gimple_assign_rhs1 (stmt);
502 tree rhs2 = gimple_assign_rhs2 (stmt);
503
504 /* Combine the comparison with defining statements. */
505 tmp = forward_propagate_into_comparison_1 (stmt,
506 gimple_assign_rhs_code (stmt),
507 type, rhs1, rhs2);
508 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
509 {
510 gimple_assign_set_rhs_from_tree (gsi, tmp);
511 fold_stmt (gsi);
512 update_stmt (gsi_stmt (*gsi));
513
514 if (TREE_CODE (rhs1) == SSA_NAME)
515 cfg_changed |= remove_prop_source_from_use (rhs1);
516 if (TREE_CODE (rhs2) == SSA_NAME)
517 cfg_changed |= remove_prop_source_from_use (rhs2);
518 return cfg_changed ? 2 : 1;
519 }
520
521 return 0;
522 }
523
524 /* Propagate from the ssa name definition statements of COND_EXPR
525 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
526 Returns zero if no statement was changed, one if there were
527 changes and two if cfg_cleanup needs to run.
528
529 This must be kept in sync with forward_propagate_into_cond. */
530
531 static int
532 forward_propagate_into_gimple_cond (gimple stmt)
533 {
534 tree tmp;
535 enum tree_code code = gimple_cond_code (stmt);
536 bool cfg_changed = false;
537 tree rhs1 = gimple_cond_lhs (stmt);
538 tree rhs2 = gimple_cond_rhs (stmt);
539
540 /* We can do tree combining on SSA_NAME and comparison expressions. */
541 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
542 return 0;
543
544 tmp = forward_propagate_into_comparison_1 (stmt, code,
545 boolean_type_node,
546 rhs1, rhs2);
547 if (tmp)
548 {
549 if (dump_file && tmp)
550 {
551 fprintf (dump_file, " Replaced '");
552 print_gimple_expr (dump_file, stmt, 0, 0);
553 fprintf (dump_file, "' with '");
554 print_generic_expr (dump_file, tmp, 0);
555 fprintf (dump_file, "'\n");
556 }
557
558 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
559 update_stmt (stmt);
560
561 if (TREE_CODE (rhs1) == SSA_NAME)
562 cfg_changed |= remove_prop_source_from_use (rhs1);
563 if (TREE_CODE (rhs2) == SSA_NAME)
564 cfg_changed |= remove_prop_source_from_use (rhs2);
565 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
566 }
567
568 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
569 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
570 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
571 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
572 && ((code == EQ_EXPR
573 && integer_zerop (rhs2))
574 || (code == NE_EXPR
575 && integer_onep (rhs2))))
576 {
577 basic_block bb = gimple_bb (stmt);
578 gimple_cond_set_code (stmt, NE_EXPR);
579 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
580 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
581 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
582 return 1;
583 }
584
585 return 0;
586 }
587
588
589 /* Propagate from the ssa name definition statements of COND_EXPR
590 in the rhs of statement STMT into the conditional if that simplifies it.
591 Returns true zero if the stmt was changed. */
592
593 static bool
594 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
595 {
596 gimple stmt = gsi_stmt (*gsi_p);
597 tree tmp = NULL_TREE;
598 tree cond = gimple_assign_rhs1 (stmt);
599 enum tree_code code = gimple_assign_rhs_code (stmt);
600
601 /* We can do tree combining on SSA_NAME and comparison expressions. */
602 if (COMPARISON_CLASS_P (cond))
603 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
604 TREE_TYPE (cond),
605 TREE_OPERAND (cond, 0),
606 TREE_OPERAND (cond, 1));
607 else if (TREE_CODE (cond) == SSA_NAME)
608 {
609 enum tree_code def_code;
610 tree name = cond;
611 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
612 if (!def_stmt || !can_propagate_from (def_stmt))
613 return 0;
614
615 def_code = gimple_assign_rhs_code (def_stmt);
616 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
617 tmp = fold_build2_loc (gimple_location (def_stmt),
618 def_code,
619 TREE_TYPE (cond),
620 gimple_assign_rhs1 (def_stmt),
621 gimple_assign_rhs2 (def_stmt));
622 }
623
624 if (tmp
625 && is_gimple_condexpr (tmp))
626 {
627 if (dump_file && tmp)
628 {
629 fprintf (dump_file, " Replaced '");
630 print_generic_expr (dump_file, cond, 0);
631 fprintf (dump_file, "' with '");
632 print_generic_expr (dump_file, tmp, 0);
633 fprintf (dump_file, "'\n");
634 }
635
636 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
637 : integer_onep (tmp))
638 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
639 else if (integer_zerop (tmp))
640 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
641 else
642 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
643 stmt = gsi_stmt (*gsi_p);
644 update_stmt (stmt);
645
646 return true;
647 }
648
649 return 0;
650 }
651
652 /* We've just substituted an ADDR_EXPR into stmt. Update all the
653 relevant data structures to match. */
654
655 static void
656 tidy_after_forward_propagate_addr (gimple stmt)
657 {
658 /* We may have turned a trapping insn into a non-trapping insn. */
659 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
660 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
661
662 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
663 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
664 }
665
666 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
667 ADDR_EXPR <whatever>.
668
669 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
670 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
671 node or for recovery of array indexing from pointer arithmetic.
672
673 Return true if the propagation was successful (the propagation can
674 be not totally successful, yet things may have been changed). */
675
676 static bool
677 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
678 gimple_stmt_iterator *use_stmt_gsi,
679 bool single_use_p)
680 {
681 tree lhs, rhs, rhs2, array_ref;
682 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
683 enum tree_code rhs_code;
684 bool res = true;
685
686 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
687
688 lhs = gimple_assign_lhs (use_stmt);
689 rhs_code = gimple_assign_rhs_code (use_stmt);
690 rhs = gimple_assign_rhs1 (use_stmt);
691
692 /* Do not perform copy-propagation but recurse through copy chains. */
693 if (TREE_CODE (lhs) == SSA_NAME
694 && rhs_code == SSA_NAME)
695 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
696
697 /* The use statement could be a conversion. Recurse to the uses of the
698 lhs as copyprop does not copy through pointer to integer to pointer
699 conversions and FRE does not catch all cases either.
700 Treat the case of a single-use name and
701 a conversion to def_rhs type separate, though. */
702 if (TREE_CODE (lhs) == SSA_NAME
703 && CONVERT_EXPR_CODE_P (rhs_code))
704 {
705 /* If there is a point in a conversion chain where the types match
706 so we can remove a conversion re-materialize the address here
707 and stop. */
708 if (single_use_p
709 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
710 {
711 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
712 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
713 return true;
714 }
715
716 /* Else recurse if the conversion preserves the address value. */
717 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
718 || POINTER_TYPE_P (TREE_TYPE (lhs)))
719 && (TYPE_PRECISION (TREE_TYPE (lhs))
720 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
721 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
722
723 return false;
724 }
725
726 /* If this isn't a conversion chain from this on we only can propagate
727 into compatible pointer contexts. */
728 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
729 return false;
730
731 /* Propagate through constant pointer adjustments. */
732 if (TREE_CODE (lhs) == SSA_NAME
733 && rhs_code == POINTER_PLUS_EXPR
734 && rhs == name
735 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
736 {
737 tree new_def_rhs;
738 /* As we come here with non-invariant addresses in def_rhs we need
739 to make sure we can build a valid constant offsetted address
740 for further propagation. Simply rely on fold building that
741 and check after the fact. */
742 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
743 def_rhs,
744 fold_convert (ptr_type_node,
745 gimple_assign_rhs2 (use_stmt)));
746 if (TREE_CODE (new_def_rhs) == MEM_REF
747 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
748 return false;
749 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
750 TREE_TYPE (rhs));
751
752 /* Recurse. If we could propagate into all uses of lhs do not
753 bother to replace into the current use but just pretend we did. */
754 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
755 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
756 return true;
757
758 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
759 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
760 new_def_rhs, NULL_TREE);
761 else if (is_gimple_min_invariant (new_def_rhs))
762 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
763 new_def_rhs, NULL_TREE);
764 else
765 return false;
766 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
767 update_stmt (use_stmt);
768 return true;
769 }
770
771 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
772 ADDR_EXPR will not appear on the LHS. */
773 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
774 while (handled_component_p (*lhsp))
775 lhsp = &TREE_OPERAND (*lhsp, 0);
776 lhs = *lhsp;
777
778 /* Now see if the LHS node is a MEM_REF using NAME. If so,
779 propagate the ADDR_EXPR into the use of NAME and fold the result. */
780 if (TREE_CODE (lhs) == MEM_REF
781 && TREE_OPERAND (lhs, 0) == name)
782 {
783 tree def_rhs_base;
784 HOST_WIDE_INT def_rhs_offset;
785 /* If the address is invariant we can always fold it. */
786 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
787 &def_rhs_offset)))
788 {
789 offset_int off = mem_ref_offset (lhs);
790 tree new_ptr;
791 off += def_rhs_offset;
792 if (TREE_CODE (def_rhs_base) == MEM_REF)
793 {
794 off += mem_ref_offset (def_rhs_base);
795 new_ptr = TREE_OPERAND (def_rhs_base, 0);
796 }
797 else
798 new_ptr = build_fold_addr_expr (def_rhs_base);
799 TREE_OPERAND (lhs, 0) = new_ptr;
800 TREE_OPERAND (lhs, 1)
801 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
802 tidy_after_forward_propagate_addr (use_stmt);
803 /* Continue propagating into the RHS if this was not the only use. */
804 if (single_use_p)
805 return true;
806 }
807 /* If the LHS is a plain dereference and the value type is the same as
808 that of the pointed-to type of the address we can put the
809 dereferenced address on the LHS preserving the original alias-type. */
810 else if (integer_zerop (TREE_OPERAND (lhs, 1))
811 && ((gimple_assign_lhs (use_stmt) == lhs
812 && useless_type_conversion_p
813 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
814 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
815 || types_compatible_p (TREE_TYPE (lhs),
816 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
817 /* Don't forward anything into clobber stmts if it would result
818 in the lhs no longer being a MEM_REF. */
819 && (!gimple_clobber_p (use_stmt)
820 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
821 {
822 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
823 tree new_offset, new_base, saved, new_lhs;
824 while (handled_component_p (*def_rhs_basep))
825 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
826 saved = *def_rhs_basep;
827 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
828 {
829 new_base = TREE_OPERAND (*def_rhs_basep, 0);
830 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
831 TREE_OPERAND (*def_rhs_basep, 1));
832 }
833 else
834 {
835 new_base = build_fold_addr_expr (*def_rhs_basep);
836 new_offset = TREE_OPERAND (lhs, 1);
837 }
838 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
839 new_base, new_offset);
840 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
841 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
842 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
843 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
844 *lhsp = new_lhs;
845 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
846 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
847 *def_rhs_basep = saved;
848 tidy_after_forward_propagate_addr (use_stmt);
849 /* Continue propagating into the RHS if this was not the
850 only use. */
851 if (single_use_p)
852 return true;
853 }
854 else
855 /* We can have a struct assignment dereferencing our name twice.
856 Note that we didn't propagate into the lhs to not falsely
857 claim we did when propagating into the rhs. */
858 res = false;
859 }
860
861 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
862 nodes from the RHS. */
863 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
864 if (TREE_CODE (*rhsp) == ADDR_EXPR)
865 rhsp = &TREE_OPERAND (*rhsp, 0);
866 while (handled_component_p (*rhsp))
867 rhsp = &TREE_OPERAND (*rhsp, 0);
868 rhs = *rhsp;
869
870 /* Now see if the RHS node is a MEM_REF using NAME. If so,
871 propagate the ADDR_EXPR into the use of NAME and fold the result. */
872 if (TREE_CODE (rhs) == MEM_REF
873 && TREE_OPERAND (rhs, 0) == name)
874 {
875 tree def_rhs_base;
876 HOST_WIDE_INT def_rhs_offset;
877 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
878 &def_rhs_offset)))
879 {
880 offset_int off = mem_ref_offset (rhs);
881 tree new_ptr;
882 off += def_rhs_offset;
883 if (TREE_CODE (def_rhs_base) == MEM_REF)
884 {
885 off += mem_ref_offset (def_rhs_base);
886 new_ptr = TREE_OPERAND (def_rhs_base, 0);
887 }
888 else
889 new_ptr = build_fold_addr_expr (def_rhs_base);
890 TREE_OPERAND (rhs, 0) = new_ptr;
891 TREE_OPERAND (rhs, 1)
892 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
893 fold_stmt_inplace (use_stmt_gsi);
894 tidy_after_forward_propagate_addr (use_stmt);
895 return res;
896 }
897 /* If the RHS is a plain dereference and the value type is the same as
898 that of the pointed-to type of the address we can put the
899 dereferenced address on the RHS preserving the original alias-type. */
900 else if (integer_zerop (TREE_OPERAND (rhs, 1))
901 && ((gimple_assign_rhs1 (use_stmt) == rhs
902 && useless_type_conversion_p
903 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
904 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
905 || types_compatible_p (TREE_TYPE (rhs),
906 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
907 {
908 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
909 tree new_offset, new_base, saved, new_rhs;
910 while (handled_component_p (*def_rhs_basep))
911 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
912 saved = *def_rhs_basep;
913 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
914 {
915 new_base = TREE_OPERAND (*def_rhs_basep, 0);
916 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
917 TREE_OPERAND (*def_rhs_basep, 1));
918 }
919 else
920 {
921 new_base = build_fold_addr_expr (*def_rhs_basep);
922 new_offset = TREE_OPERAND (rhs, 1);
923 }
924 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
925 new_base, new_offset);
926 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
927 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
928 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
929 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
930 *rhsp = new_rhs;
931 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
932 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
933 *def_rhs_basep = saved;
934 fold_stmt_inplace (use_stmt_gsi);
935 tidy_after_forward_propagate_addr (use_stmt);
936 return res;
937 }
938 }
939
940 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
941 is nothing to do. */
942 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
943 || gimple_assign_rhs1 (use_stmt) != name)
944 return false;
945
946 /* The remaining cases are all for turning pointer arithmetic into
947 array indexing. They only apply when we have the address of
948 element zero in an array. If that is not the case then there
949 is nothing to do. */
950 array_ref = TREE_OPERAND (def_rhs, 0);
951 if ((TREE_CODE (array_ref) != ARRAY_REF
952 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
953 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
954 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
955 return false;
956
957 rhs2 = gimple_assign_rhs2 (use_stmt);
958 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
959 if (TREE_CODE (rhs2) == INTEGER_CST)
960 {
961 tree new_rhs = build1_loc (gimple_location (use_stmt),
962 ADDR_EXPR, TREE_TYPE (def_rhs),
963 fold_build2 (MEM_REF,
964 TREE_TYPE (TREE_TYPE (def_rhs)),
965 unshare_expr (def_rhs),
966 fold_convert (ptr_type_node,
967 rhs2)));
968 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
969 use_stmt = gsi_stmt (*use_stmt_gsi);
970 update_stmt (use_stmt);
971 tidy_after_forward_propagate_addr (use_stmt);
972 return true;
973 }
974
975 return false;
976 }
977
978 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
979
980 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
981 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
982 node or for recovery of array indexing from pointer arithmetic.
983
984 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
985 the single use in the previous invocation. Pass true when calling
986 this as toplevel.
987
988 Returns true, if all uses have been propagated into. */
989
990 static bool
991 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
992 {
993 imm_use_iterator iter;
994 gimple use_stmt;
995 bool all = true;
996 bool single_use_p = parent_single_use_p && has_single_use (name);
997
998 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
999 {
1000 bool result;
1001 tree use_rhs;
1002
1003 /* If the use is not in a simple assignment statement, then
1004 there is nothing we can do. */
1005 if (!is_gimple_assign (use_stmt))
1006 {
1007 if (!is_gimple_debug (use_stmt))
1008 all = false;
1009 continue;
1010 }
1011
1012 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1013 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1014 single_use_p);
1015 /* If the use has moved to a different statement adjust
1016 the update machinery for the old statement too. */
1017 if (use_stmt != gsi_stmt (gsi))
1018 {
1019 update_stmt (use_stmt);
1020 use_stmt = gsi_stmt (gsi);
1021 }
1022 update_stmt (use_stmt);
1023 all &= result;
1024
1025 /* Remove intermediate now unused copy and conversion chains. */
1026 use_rhs = gimple_assign_rhs1 (use_stmt);
1027 if (result
1028 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1029 && TREE_CODE (use_rhs) == SSA_NAME
1030 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1031 {
1032 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1033 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
1034 release_defs (use_stmt);
1035 gsi_remove (&gsi, true);
1036 }
1037 }
1038
1039 return all && has_zero_uses (name);
1040 }
1041
1042
1043 /* Helper function for simplify_gimple_switch. Remove case labels that
1044 have values outside the range of the new type. */
1045
1046 static void
1047 simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1048 {
1049 unsigned int branch_num = gimple_switch_num_labels (stmt);
1050 auto_vec<tree> labels (branch_num);
1051 unsigned int i, len;
1052
1053 /* Collect the existing case labels in a VEC, and preprocess it as if
1054 we are gimplifying a GENERIC SWITCH_EXPR. */
1055 for (i = 1; i < branch_num; i++)
1056 labels.quick_push (gimple_switch_label (stmt, i));
1057 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1058
1059 /* If any labels were removed, replace the existing case labels
1060 in the GIMPLE_SWITCH statement with the correct ones.
1061 Note that the type updates were done in-place on the case labels,
1062 so we only have to replace the case labels in the GIMPLE_SWITCH
1063 if the number of labels changed. */
1064 len = labels.length ();
1065 if (len < branch_num - 1)
1066 {
1067 bitmap target_blocks;
1068 edge_iterator ei;
1069 edge e;
1070
1071 /* Corner case: *all* case labels have been removed as being
1072 out-of-range for INDEX_TYPE. Push one label and let the
1073 CFG cleanups deal with this further. */
1074 if (len == 0)
1075 {
1076 tree label, elt;
1077
1078 label = CASE_LABEL (gimple_switch_default_label (stmt));
1079 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1080 labels.quick_push (elt);
1081 len = 1;
1082 }
1083
1084 for (i = 0; i < labels.length (); i++)
1085 gimple_switch_set_label (stmt, i + 1, labels[i]);
1086 for (i++ ; i < branch_num; i++)
1087 gimple_switch_set_label (stmt, i, NULL_TREE);
1088 gimple_switch_set_num_labels (stmt, len + 1);
1089
1090 /* Cleanup any edges that are now dead. */
1091 target_blocks = BITMAP_ALLOC (NULL);
1092 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1093 {
1094 tree elt = gimple_switch_label (stmt, i);
1095 basic_block target = label_to_block (CASE_LABEL (elt));
1096 bitmap_set_bit (target_blocks, target->index);
1097 }
1098 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1099 {
1100 if (! bitmap_bit_p (target_blocks, e->dest->index))
1101 {
1102 remove_edge (e);
1103 cfg_changed = true;
1104 free_dominance_info (CDI_DOMINATORS);
1105 }
1106 else
1107 ei_next (&ei);
1108 }
1109 BITMAP_FREE (target_blocks);
1110 }
1111 }
1112
1113 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1114 the condition which we may be able to optimize better. */
1115
1116 static bool
1117 simplify_gimple_switch (gimple stmt)
1118 {
1119 /* The optimization that we really care about is removing unnecessary
1120 casts. That will let us do much better in propagating the inferred
1121 constant at the switch target. */
1122 tree cond = gimple_switch_index (stmt);
1123 if (TREE_CODE (cond) == SSA_NAME)
1124 {
1125 gimple def_stmt = SSA_NAME_DEF_STMT (cond);
1126 if (gimple_assign_cast_p (def_stmt))
1127 {
1128 tree def = gimple_assign_rhs1 (def_stmt);
1129 if (TREE_CODE (def) != SSA_NAME)
1130 return false;
1131
1132 /* If we have an extension or sign-change that preserves the
1133 values we check against then we can copy the source value into
1134 the switch. */
1135 tree ti = TREE_TYPE (def);
1136 if (INTEGRAL_TYPE_P (ti)
1137 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1138 {
1139 size_t n = gimple_switch_num_labels (stmt);
1140 tree min = NULL_TREE, max = NULL_TREE;
1141 if (n > 1)
1142 {
1143 min = CASE_LOW (gimple_switch_label (stmt, 1));
1144 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1145 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1146 else
1147 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1148 }
1149 if ((!min || int_fits_type_p (min, ti))
1150 && (!max || int_fits_type_p (max, ti)))
1151 {
1152 gimple_switch_set_index (stmt, def);
1153 simplify_gimple_switch_label_vec (stmt, ti);
1154 update_stmt (stmt);
1155 return true;
1156 }
1157 }
1158 }
1159 }
1160
1161 return false;
1162 }
1163
1164 /* For pointers p2 and p1 return p2 - p1 if the
1165 difference is known and constant, otherwise return NULL. */
1166
1167 static tree
1168 constant_pointer_difference (tree p1, tree p2)
1169 {
1170 int i, j;
1171 #define CPD_ITERATIONS 5
1172 tree exps[2][CPD_ITERATIONS];
1173 tree offs[2][CPD_ITERATIONS];
1174 int cnt[2];
1175
1176 for (i = 0; i < 2; i++)
1177 {
1178 tree p = i ? p1 : p2;
1179 tree off = size_zero_node;
1180 gimple stmt;
1181 enum tree_code code;
1182
1183 /* For each of p1 and p2 we need to iterate at least
1184 twice, to handle ADDR_EXPR directly in p1/p2,
1185 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1186 on definition's stmt RHS. Iterate a few extra times. */
1187 j = 0;
1188 do
1189 {
1190 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1191 break;
1192 if (TREE_CODE (p) == ADDR_EXPR)
1193 {
1194 tree q = TREE_OPERAND (p, 0);
1195 HOST_WIDE_INT offset;
1196 tree base = get_addr_base_and_unit_offset (q, &offset);
1197 if (base)
1198 {
1199 q = base;
1200 if (offset)
1201 off = size_binop (PLUS_EXPR, off, size_int (offset));
1202 }
1203 if (TREE_CODE (q) == MEM_REF
1204 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1205 {
1206 p = TREE_OPERAND (q, 0);
1207 off = size_binop (PLUS_EXPR, off,
1208 wide_int_to_tree (sizetype,
1209 mem_ref_offset (q)));
1210 }
1211 else
1212 {
1213 exps[i][j] = q;
1214 offs[i][j++] = off;
1215 break;
1216 }
1217 }
1218 if (TREE_CODE (p) != SSA_NAME)
1219 break;
1220 exps[i][j] = p;
1221 offs[i][j++] = off;
1222 if (j == CPD_ITERATIONS)
1223 break;
1224 stmt = SSA_NAME_DEF_STMT (p);
1225 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1226 break;
1227 code = gimple_assign_rhs_code (stmt);
1228 if (code == POINTER_PLUS_EXPR)
1229 {
1230 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1231 break;
1232 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1233 p = gimple_assign_rhs1 (stmt);
1234 }
1235 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1236 p = gimple_assign_rhs1 (stmt);
1237 else
1238 break;
1239 }
1240 while (1);
1241 cnt[i] = j;
1242 }
1243
1244 for (i = 0; i < cnt[0]; i++)
1245 for (j = 0; j < cnt[1]; j++)
1246 if (exps[0][i] == exps[1][j])
1247 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1248
1249 return NULL_TREE;
1250 }
1251
1252 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1253 Optimize
1254 memcpy (p, "abcd", 4);
1255 memset (p + 4, ' ', 3);
1256 into
1257 memcpy (p, "abcd ", 7);
1258 call if the latter can be stored by pieces during expansion. */
1259
1260 static bool
1261 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1262 {
1263 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1264 tree vuse = gimple_vuse (stmt2);
1265 if (vuse == NULL)
1266 return false;
1267 stmt1 = SSA_NAME_DEF_STMT (vuse);
1268
1269 switch (DECL_FUNCTION_CODE (callee2))
1270 {
1271 case BUILT_IN_MEMSET:
1272 if (gimple_call_num_args (stmt2) != 3
1273 || gimple_call_lhs (stmt2)
1274 || CHAR_BIT != 8
1275 || BITS_PER_UNIT != 8)
1276 break;
1277 else
1278 {
1279 tree callee1;
1280 tree ptr1, src1, str1, off1, len1, lhs1;
1281 tree ptr2 = gimple_call_arg (stmt2, 0);
1282 tree val2 = gimple_call_arg (stmt2, 1);
1283 tree len2 = gimple_call_arg (stmt2, 2);
1284 tree diff, vdef, new_str_cst;
1285 gimple use_stmt;
1286 unsigned int ptr1_align;
1287 unsigned HOST_WIDE_INT src_len;
1288 char *src_buf;
1289 use_operand_p use_p;
1290
1291 if (!tree_fits_shwi_p (val2)
1292 || !tree_fits_uhwi_p (len2))
1293 break;
1294 if (is_gimple_call (stmt1))
1295 {
1296 /* If first stmt is a call, it needs to be memcpy
1297 or mempcpy, with string literal as second argument and
1298 constant length. */
1299 callee1 = gimple_call_fndecl (stmt1);
1300 if (callee1 == NULL_TREE
1301 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1302 || gimple_call_num_args (stmt1) != 3)
1303 break;
1304 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1305 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1306 break;
1307 ptr1 = gimple_call_arg (stmt1, 0);
1308 src1 = gimple_call_arg (stmt1, 1);
1309 len1 = gimple_call_arg (stmt1, 2);
1310 lhs1 = gimple_call_lhs (stmt1);
1311 if (!tree_fits_uhwi_p (len1))
1312 break;
1313 str1 = string_constant (src1, &off1);
1314 if (str1 == NULL_TREE)
1315 break;
1316 if (!tree_fits_uhwi_p (off1)
1317 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1318 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1319 - tree_to_uhwi (off1)) > 0
1320 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1321 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1322 != TYPE_MODE (char_type_node))
1323 break;
1324 }
1325 else if (gimple_assign_single_p (stmt1))
1326 {
1327 /* Otherwise look for length 1 memcpy optimized into
1328 assignment. */
1329 ptr1 = gimple_assign_lhs (stmt1);
1330 src1 = gimple_assign_rhs1 (stmt1);
1331 if (TREE_CODE (ptr1) != MEM_REF
1332 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1333 || !tree_fits_shwi_p (src1))
1334 break;
1335 ptr1 = build_fold_addr_expr (ptr1);
1336 callee1 = NULL_TREE;
1337 len1 = size_one_node;
1338 lhs1 = NULL_TREE;
1339 off1 = size_zero_node;
1340 str1 = NULL_TREE;
1341 }
1342 else
1343 break;
1344
1345 diff = constant_pointer_difference (ptr1, ptr2);
1346 if (diff == NULL && lhs1 != NULL)
1347 {
1348 diff = constant_pointer_difference (lhs1, ptr2);
1349 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1350 && diff != NULL)
1351 diff = size_binop (PLUS_EXPR, diff,
1352 fold_convert (sizetype, len1));
1353 }
1354 /* If the difference between the second and first destination pointer
1355 is not constant, or is bigger than memcpy length, bail out. */
1356 if (diff == NULL
1357 || !tree_fits_uhwi_p (diff)
1358 || tree_int_cst_lt (len1, diff))
1359 break;
1360
1361 /* Use maximum of difference plus memset length and memcpy length
1362 as the new memcpy length, if it is too big, bail out. */
1363 src_len = tree_to_uhwi (diff);
1364 src_len += tree_to_uhwi (len2);
1365 if (src_len < tree_to_uhwi (len1))
1366 src_len = tree_to_uhwi (len1);
1367 if (src_len > 1024)
1368 break;
1369
1370 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1371 with bigger length will return different result. */
1372 if (lhs1 != NULL_TREE
1373 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1374 && (TREE_CODE (lhs1) != SSA_NAME
1375 || !single_imm_use (lhs1, &use_p, &use_stmt)
1376 || use_stmt != stmt2))
1377 break;
1378
1379 /* If anything reads memory in between memcpy and memset
1380 call, the modified memcpy call might change it. */
1381 vdef = gimple_vdef (stmt1);
1382 if (vdef != NULL
1383 && (!single_imm_use (vdef, &use_p, &use_stmt)
1384 || use_stmt != stmt2))
1385 break;
1386
1387 ptr1_align = get_pointer_alignment (ptr1);
1388 /* Construct the new source string literal. */
1389 src_buf = XALLOCAVEC (char, src_len + 1);
1390 if (callee1)
1391 memcpy (src_buf,
1392 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1393 tree_to_uhwi (len1));
1394 else
1395 src_buf[0] = tree_to_shwi (src1);
1396 memset (src_buf + tree_to_uhwi (diff),
1397 tree_to_shwi (val2), tree_to_uhwi (len2));
1398 src_buf[src_len] = '\0';
1399 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1400 handle embedded '\0's. */
1401 if (strlen (src_buf) != src_len)
1402 break;
1403 rtl_profile_for_bb (gimple_bb (stmt2));
1404 /* If the new memcpy wouldn't be emitted by storing the literal
1405 by pieces, this optimization might enlarge .rodata too much,
1406 as commonly used string literals couldn't be shared any
1407 longer. */
1408 if (!can_store_by_pieces (src_len,
1409 builtin_strncpy_read_str,
1410 src_buf, ptr1_align, false))
1411 break;
1412
1413 new_str_cst = build_string_literal (src_len, src_buf);
1414 if (callee1)
1415 {
1416 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1417 memset call. */
1418 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1419 gimple_call_set_lhs (stmt1, NULL_TREE);
1420 gimple_call_set_arg (stmt1, 1, new_str_cst);
1421 gimple_call_set_arg (stmt1, 2,
1422 build_int_cst (TREE_TYPE (len1), src_len));
1423 update_stmt (stmt1);
1424 unlink_stmt_vdef (stmt2);
1425 gsi_remove (gsi_p, true);
1426 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1427 release_defs (stmt2);
1428 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1429 {
1430 fwprop_invalidate_lattice (lhs1);
1431 release_ssa_name (lhs1);
1432 }
1433 return true;
1434 }
1435 else
1436 {
1437 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1438 assignment, remove STMT1 and change memset call into
1439 memcpy call. */
1440 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1441
1442 if (!is_gimple_val (ptr1))
1443 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1444 true, GSI_SAME_STMT);
1445 gimple_call_set_fndecl (stmt2,
1446 builtin_decl_explicit (BUILT_IN_MEMCPY));
1447 gimple_call_set_arg (stmt2, 0, ptr1);
1448 gimple_call_set_arg (stmt2, 1, new_str_cst);
1449 gimple_call_set_arg (stmt2, 2,
1450 build_int_cst (TREE_TYPE (len2), src_len));
1451 unlink_stmt_vdef (stmt1);
1452 gsi_remove (&gsi, true);
1453 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1454 release_defs (stmt1);
1455 update_stmt (stmt2);
1456 return false;
1457 }
1458 }
1459 break;
1460 default:
1461 break;
1462 }
1463 return false;
1464 }
1465
1466 /* Given a ssa_name in NAME see if it was defined by an assignment and
1467 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1468 to the second operand on the rhs. */
1469
1470 static inline void
1471 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1472 {
1473 gimple def;
1474 enum tree_code code1;
1475 tree arg11;
1476 tree arg21;
1477 tree arg31;
1478 enum gimple_rhs_class grhs_class;
1479
1480 code1 = TREE_CODE (name);
1481 arg11 = name;
1482 arg21 = NULL_TREE;
1483 grhs_class = get_gimple_rhs_class (code1);
1484
1485 if (code1 == SSA_NAME)
1486 {
1487 def = SSA_NAME_DEF_STMT (name);
1488
1489 if (def && is_gimple_assign (def)
1490 && can_propagate_from (def))
1491 {
1492 code1 = gimple_assign_rhs_code (def);
1493 arg11 = gimple_assign_rhs1 (def);
1494 arg21 = gimple_assign_rhs2 (def);
1495 arg31 = gimple_assign_rhs2 (def);
1496 }
1497 }
1498 else if (grhs_class == GIMPLE_TERNARY_RHS
1499 || GIMPLE_BINARY_RHS
1500 || GIMPLE_UNARY_RHS
1501 || GIMPLE_SINGLE_RHS)
1502 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1503
1504 *code = code1;
1505 *arg1 = arg11;
1506 if (arg2)
1507 *arg2 = arg21;
1508 /* Ignore arg3 currently. */
1509 }
1510
1511
1512 /* Recognize rotation patterns. Return true if a transformation
1513 applied, otherwise return false.
1514
1515 We are looking for X with unsigned type T with bitsize B, OP being
1516 +, | or ^, some type T2 wider than T and
1517 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1518 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1519 (X << Y) OP (X >> (B - Y))
1520 (X << (int) Y) OP (X >> (int) (B - Y))
1521 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1522 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1523 (X << Y) | (X >> ((-Y) & (B - 1)))
1524 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1525 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1526 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1527
1528 and transform these into:
1529 X r<< CNT1
1530 X r<< Y
1531
1532 Note, in the patterns with T2 type, the type of OP operands
1533 might be even a signed type, but should have precision B. */
1534
1535 static bool
1536 simplify_rotate (gimple_stmt_iterator *gsi)
1537 {
1538 gimple stmt = gsi_stmt (*gsi);
1539 tree arg[2], rtype, rotcnt = NULL_TREE;
1540 tree def_arg1[2], def_arg2[2];
1541 enum tree_code def_code[2];
1542 tree lhs;
1543 int i;
1544 bool swapped_p = false;
1545 gimple g;
1546
1547 arg[0] = gimple_assign_rhs1 (stmt);
1548 arg[1] = gimple_assign_rhs2 (stmt);
1549 rtype = TREE_TYPE (arg[0]);
1550
1551 /* Only create rotates in complete modes. Other cases are not
1552 expanded properly. */
1553 if (!INTEGRAL_TYPE_P (rtype)
1554 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
1555 return false;
1556
1557 for (i = 0; i < 2; i++)
1558 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1559
1560 /* Look through narrowing conversions. */
1561 if (CONVERT_EXPR_CODE_P (def_code[0])
1562 && CONVERT_EXPR_CODE_P (def_code[1])
1563 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1564 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1565 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1566 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1567 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1568 && has_single_use (arg[0])
1569 && has_single_use (arg[1]))
1570 {
1571 for (i = 0; i < 2; i++)
1572 {
1573 arg[i] = def_arg1[i];
1574 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1575 }
1576 }
1577
1578 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1579 for (i = 0; i < 2; i++)
1580 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1581 return false;
1582 else if (!has_single_use (arg[i]))
1583 return false;
1584 if (def_code[0] == def_code[1])
1585 return false;
1586
1587 /* If we've looked through narrowing conversions before, look through
1588 widening conversions from unsigned type with the same precision
1589 as rtype here. */
1590 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1591 for (i = 0; i < 2; i++)
1592 {
1593 tree tem;
1594 enum tree_code code;
1595 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1596 if (!CONVERT_EXPR_CODE_P (code)
1597 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1598 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1599 return false;
1600 def_arg1[i] = tem;
1601 }
1602 /* Both shifts have to use the same first operand. */
1603 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
1604 return false;
1605 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1606 return false;
1607
1608 /* CNT1 + CNT2 == B case above. */
1609 if (tree_fits_uhwi_p (def_arg2[0])
1610 && tree_fits_uhwi_p (def_arg2[1])
1611 && tree_to_uhwi (def_arg2[0])
1612 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1613 rotcnt = def_arg2[0];
1614 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1615 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1616 return false;
1617 else
1618 {
1619 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1620 enum tree_code cdef_code[2];
1621 /* Look through conversion of the shift count argument.
1622 The C/C++ FE cast any shift count argument to integer_type_node.
1623 The only problem might be if the shift count type maximum value
1624 is equal or smaller than number of bits in rtype. */
1625 for (i = 0; i < 2; i++)
1626 {
1627 def_arg2_alt[i] = def_arg2[i];
1628 defcodefor_name (def_arg2[i], &cdef_code[i],
1629 &cdef_arg1[i], &cdef_arg2[i]);
1630 if (CONVERT_EXPR_CODE_P (cdef_code[i])
1631 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1632 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1633 > floor_log2 (TYPE_PRECISION (rtype))
1634 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1635 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
1636 {
1637 def_arg2_alt[i] = cdef_arg1[i];
1638 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1639 &cdef_arg1[i], &cdef_arg2[i]);
1640 }
1641 }
1642 for (i = 0; i < 2; i++)
1643 /* Check for one shift count being Y and the other B - Y,
1644 with optional casts. */
1645 if (cdef_code[i] == MINUS_EXPR
1646 && tree_fits_shwi_p (cdef_arg1[i])
1647 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1648 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1649 {
1650 tree tem;
1651 enum tree_code code;
1652
1653 if (cdef_arg2[i] == def_arg2[1 - i]
1654 || cdef_arg2[i] == def_arg2_alt[1 - i])
1655 {
1656 rotcnt = cdef_arg2[i];
1657 break;
1658 }
1659 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1660 if (CONVERT_EXPR_CODE_P (code)
1661 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1662 && TYPE_PRECISION (TREE_TYPE (tem))
1663 > floor_log2 (TYPE_PRECISION (rtype))
1664 && TYPE_PRECISION (TREE_TYPE (tem))
1665 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1666 && (tem == def_arg2[1 - i]
1667 || tem == def_arg2_alt[1 - i]))
1668 {
1669 rotcnt = tem;
1670 break;
1671 }
1672 }
1673 /* The above sequence isn't safe for Y being 0,
1674 because then one of the shifts triggers undefined behavior.
1675 This alternative is safe even for rotation count of 0.
1676 One shift count is Y and the other (-Y) & (B - 1). */
1677 else if (cdef_code[i] == BIT_AND_EXPR
1678 && tree_fits_shwi_p (cdef_arg2[i])
1679 && tree_to_shwi (cdef_arg2[i])
1680 == TYPE_PRECISION (rtype) - 1
1681 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1682 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1683 {
1684 tree tem;
1685 enum tree_code code;
1686
1687 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1688 if (CONVERT_EXPR_CODE_P (code)
1689 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1690 && TYPE_PRECISION (TREE_TYPE (tem))
1691 > floor_log2 (TYPE_PRECISION (rtype))
1692 && TYPE_PRECISION (TREE_TYPE (tem))
1693 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
1694 defcodefor_name (tem, &code, &tem, NULL);
1695
1696 if (code == NEGATE_EXPR)
1697 {
1698 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1699 {
1700 rotcnt = tem;
1701 break;
1702 }
1703 defcodefor_name (tem, &code, &tem, NULL);
1704 if (CONVERT_EXPR_CODE_P (code)
1705 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1706 && TYPE_PRECISION (TREE_TYPE (tem))
1707 > floor_log2 (TYPE_PRECISION (rtype))
1708 && TYPE_PRECISION (TREE_TYPE (tem))
1709 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1710 && (tem == def_arg2[1 - i]
1711 || tem == def_arg2_alt[1 - i]))
1712 {
1713 rotcnt = tem;
1714 break;
1715 }
1716 }
1717 }
1718 if (rotcnt == NULL_TREE)
1719 return false;
1720 swapped_p = i != 1;
1721 }
1722
1723 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1724 TREE_TYPE (rotcnt)))
1725 {
1726 g = gimple_build_assign_with_ops (NOP_EXPR,
1727 make_ssa_name (TREE_TYPE (def_arg2[0]),
1728 NULL),
1729 rotcnt, NULL_TREE);
1730 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1731 rotcnt = gimple_assign_lhs (g);
1732 }
1733 lhs = gimple_assign_lhs (stmt);
1734 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1735 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
1736 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1737 ? LROTATE_EXPR : RROTATE_EXPR,
1738 lhs, def_arg1[0], rotcnt);
1739 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1740 {
1741 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1742 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
1743 lhs, NULL_TREE);
1744 }
1745 gsi_replace (gsi, g, false);
1746 return true;
1747 }
1748
1749 /* Combine an element access with a shuffle. Returns true if there were
1750 any changes made, else it returns false. */
1751
1752 static bool
1753 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1754 {
1755 gimple stmt = gsi_stmt (*gsi);
1756 gimple def_stmt;
1757 tree op, op0, op1, op2;
1758 tree elem_type;
1759 unsigned idx, n, size;
1760 enum tree_code code;
1761
1762 op = gimple_assign_rhs1 (stmt);
1763 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1764
1765 op0 = TREE_OPERAND (op, 0);
1766 if (TREE_CODE (op0) != SSA_NAME
1767 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1768 return false;
1769
1770 def_stmt = get_prop_source_stmt (op0, false, NULL);
1771 if (!def_stmt || !can_propagate_from (def_stmt))
1772 return false;
1773
1774 op1 = TREE_OPERAND (op, 1);
1775 op2 = TREE_OPERAND (op, 2);
1776 code = gimple_assign_rhs_code (def_stmt);
1777
1778 if (code == CONSTRUCTOR)
1779 {
1780 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1781 gimple_assign_rhs1 (def_stmt), op1, op2);
1782 if (!tem || !valid_gimple_rhs_p (tem))
1783 return false;
1784 gimple_assign_set_rhs_from_tree (gsi, tem);
1785 update_stmt (gsi_stmt (*gsi));
1786 return true;
1787 }
1788
1789 elem_type = TREE_TYPE (TREE_TYPE (op0));
1790 if (TREE_TYPE (op) != elem_type)
1791 return false;
1792
1793 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1794 n = TREE_INT_CST_LOW (op1) / size;
1795 if (n != 1)
1796 return false;
1797 idx = TREE_INT_CST_LOW (op2) / size;
1798
1799 if (code == VEC_PERM_EXPR)
1800 {
1801 tree p, m, index, tem;
1802 unsigned nelts;
1803 m = gimple_assign_rhs3 (def_stmt);
1804 if (TREE_CODE (m) != VECTOR_CST)
1805 return false;
1806 nelts = VECTOR_CST_NELTS (m);
1807 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1808 idx %= 2 * nelts;
1809 if (idx < nelts)
1810 {
1811 p = gimple_assign_rhs1 (def_stmt);
1812 }
1813 else
1814 {
1815 p = gimple_assign_rhs2 (def_stmt);
1816 idx -= nelts;
1817 }
1818 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
1819 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1820 unshare_expr (p), op1, index);
1821 gimple_assign_set_rhs1 (stmt, tem);
1822 fold_stmt (gsi);
1823 update_stmt (gsi_stmt (*gsi));
1824 return true;
1825 }
1826
1827 return false;
1828 }
1829
1830 /* Determine whether applying the 2 permutations (mask1 then mask2)
1831 gives back one of the input. */
1832
1833 static int
1834 is_combined_permutation_identity (tree mask1, tree mask2)
1835 {
1836 tree mask;
1837 unsigned int nelts, i, j;
1838 bool maybe_identity1 = true;
1839 bool maybe_identity2 = true;
1840
1841 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1842 && TREE_CODE (mask2) == VECTOR_CST);
1843 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1844 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
1845
1846 nelts = VECTOR_CST_NELTS (mask);
1847 for (i = 0; i < nelts; i++)
1848 {
1849 tree val = VECTOR_CST_ELT (mask, i);
1850 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1851 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1852 if (j == i)
1853 maybe_identity2 = false;
1854 else if (j == i + nelts)
1855 maybe_identity1 = false;
1856 else
1857 return 0;
1858 }
1859 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1860 }
1861
1862 /* Combine a shuffle with its arguments. Returns 1 if there were any
1863 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
1864
1865 static int
1866 simplify_permutation (gimple_stmt_iterator *gsi)
1867 {
1868 gimple stmt = gsi_stmt (*gsi);
1869 gimple def_stmt;
1870 tree op0, op1, op2, op3, arg0, arg1;
1871 enum tree_code code;
1872 bool single_use_op0 = false;
1873
1874 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1875
1876 op0 = gimple_assign_rhs1 (stmt);
1877 op1 = gimple_assign_rhs2 (stmt);
1878 op2 = gimple_assign_rhs3 (stmt);
1879
1880 if (TREE_CODE (op2) != VECTOR_CST)
1881 return 0;
1882
1883 if (TREE_CODE (op0) == VECTOR_CST)
1884 {
1885 code = VECTOR_CST;
1886 arg0 = op0;
1887 }
1888 else if (TREE_CODE (op0) == SSA_NAME)
1889 {
1890 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1891 if (!def_stmt || !can_propagate_from (def_stmt))
1892 return 0;
1893
1894 code = gimple_assign_rhs_code (def_stmt);
1895 arg0 = gimple_assign_rhs1 (def_stmt);
1896 }
1897 else
1898 return 0;
1899
1900 /* Two consecutive shuffles. */
1901 if (code == VEC_PERM_EXPR)
1902 {
1903 tree orig;
1904 int ident;
1905
1906 if (op0 != op1)
1907 return 0;
1908 op3 = gimple_assign_rhs3 (def_stmt);
1909 if (TREE_CODE (op3) != VECTOR_CST)
1910 return 0;
1911 ident = is_combined_permutation_identity (op3, op2);
1912 if (!ident)
1913 return 0;
1914 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1915 : gimple_assign_rhs2 (def_stmt);
1916 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1917 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1918 gimple_set_num_ops (stmt, 2);
1919 update_stmt (stmt);
1920 return remove_prop_source_from_use (op0) ? 2 : 1;
1921 }
1922
1923 /* Shuffle of a constructor. */
1924 else if (code == CONSTRUCTOR || code == VECTOR_CST)
1925 {
1926 tree opt;
1927 bool ret = false;
1928 if (op0 != op1)
1929 {
1930 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1931 return 0;
1932
1933 if (TREE_CODE (op1) == VECTOR_CST)
1934 arg1 = op1;
1935 else if (TREE_CODE (op1) == SSA_NAME)
1936 {
1937 enum tree_code code2;
1938
1939 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1940 if (!def_stmt2 || !can_propagate_from (def_stmt2))
1941 return 0;
1942
1943 code2 = gimple_assign_rhs_code (def_stmt2);
1944 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1945 return 0;
1946 arg1 = gimple_assign_rhs1 (def_stmt2);
1947 }
1948 else
1949 return 0;
1950 }
1951 else
1952 {
1953 /* Already used twice in this statement. */
1954 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1955 return 0;
1956 arg1 = arg0;
1957 }
1958 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1959 if (!opt
1960 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1961 return 0;
1962 gimple_assign_set_rhs_from_tree (gsi, opt);
1963 update_stmt (gsi_stmt (*gsi));
1964 if (TREE_CODE (op0) == SSA_NAME)
1965 ret = remove_prop_source_from_use (op0);
1966 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1967 ret |= remove_prop_source_from_use (op1);
1968 return ret ? 2 : 1;
1969 }
1970
1971 return 0;
1972 }
1973
1974 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
1975
1976 static bool
1977 simplify_vector_constructor (gimple_stmt_iterator *gsi)
1978 {
1979 gimple stmt = gsi_stmt (*gsi);
1980 gimple def_stmt;
1981 tree op, op2, orig, type, elem_type;
1982 unsigned elem_size, nelts, i;
1983 enum tree_code code;
1984 constructor_elt *elt;
1985 unsigned char *sel;
1986 bool maybe_ident;
1987
1988 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
1989
1990 op = gimple_assign_rhs1 (stmt);
1991 type = TREE_TYPE (op);
1992 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
1993
1994 nelts = TYPE_VECTOR_SUBPARTS (type);
1995 elem_type = TREE_TYPE (type);
1996 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1997
1998 sel = XALLOCAVEC (unsigned char, nelts);
1999 orig = NULL;
2000 maybe_ident = true;
2001 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2002 {
2003 tree ref, op1;
2004
2005 if (i >= nelts)
2006 return false;
2007
2008 if (TREE_CODE (elt->value) != SSA_NAME)
2009 return false;
2010 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
2011 if (!def_stmt)
2012 return false;
2013 code = gimple_assign_rhs_code (def_stmt);
2014 if (code != BIT_FIELD_REF)
2015 return false;
2016 op1 = gimple_assign_rhs1 (def_stmt);
2017 ref = TREE_OPERAND (op1, 0);
2018 if (orig)
2019 {
2020 if (ref != orig)
2021 return false;
2022 }
2023 else
2024 {
2025 if (TREE_CODE (ref) != SSA_NAME)
2026 return false;
2027 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
2028 return false;
2029 orig = ref;
2030 }
2031 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2032 return false;
2033 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2034 if (sel[i] != i) maybe_ident = false;
2035 }
2036 if (i < nelts)
2037 return false;
2038
2039 if (maybe_ident)
2040 gimple_assign_set_rhs_from_tree (gsi, orig);
2041 else
2042 {
2043 tree mask_type, *mask_elts;
2044
2045 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2046 return false;
2047 mask_type
2048 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2049 nelts);
2050 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2051 || GET_MODE_SIZE (TYPE_MODE (mask_type))
2052 != GET_MODE_SIZE (TYPE_MODE (type)))
2053 return false;
2054 mask_elts = XALLOCAVEC (tree, nelts);
2055 for (i = 0; i < nelts; i++)
2056 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
2057 op2 = build_vector (mask_type, mask_elts);
2058 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
2059 }
2060 update_stmt (gsi_stmt (*gsi));
2061 return true;
2062 }
2063
2064
2065 /* Primitive "lattice" function for gimple_simplify. */
2066
2067 static tree
2068 fwprop_ssa_val (tree name)
2069 {
2070 /* First valueize NAME. */
2071 if (TREE_CODE (name) == SSA_NAME
2072 && SSA_NAME_VERSION (name) < lattice.length ())
2073 {
2074 tree val = lattice[SSA_NAME_VERSION (name)];
2075 if (val)
2076 name = val;
2077 }
2078 /* We continue matching along SSA use-def edges for SSA names
2079 that are not single-use. Currently there are no patterns
2080 that would cause any issues with that. */
2081 return name;
2082 }
2083
2084 /* Main entry point for the forward propagation and statement combine
2085 optimizer. */
2086
2087 namespace {
2088
2089 const pass_data pass_data_forwprop =
2090 {
2091 GIMPLE_PASS, /* type */
2092 "forwprop", /* name */
2093 OPTGROUP_NONE, /* optinfo_flags */
2094 TV_TREE_FORWPROP, /* tv_id */
2095 ( PROP_cfg | PROP_ssa ), /* properties_required */
2096 0, /* properties_provided */
2097 0, /* properties_destroyed */
2098 0, /* todo_flags_start */
2099 TODO_update_ssa, /* todo_flags_finish */
2100 };
2101
2102 class pass_forwprop : public gimple_opt_pass
2103 {
2104 public:
2105 pass_forwprop (gcc::context *ctxt)
2106 : gimple_opt_pass (pass_data_forwprop, ctxt)
2107 {}
2108
2109 /* opt_pass methods: */
2110 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2111 virtual bool gate (function *) { return flag_tree_forwprop; }
2112 virtual unsigned int execute (function *);
2113
2114 }; // class pass_forwprop
2115
2116 unsigned int
2117 pass_forwprop::execute (function *fun)
2118 {
2119 unsigned int todoflags = 0;
2120
2121 cfg_changed = false;
2122
2123 /* Combine stmts with the stmts defining their operands. Do that
2124 in an order that guarantees visiting SSA defs before SSA uses. */
2125 lattice.create (num_ssa_names);
2126 lattice.quick_grow_cleared (num_ssa_names);
2127 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2128 int postorder_num = inverted_post_order_compute (postorder);
2129 to_purge = BITMAP_ALLOC (NULL);
2130 for (int i = 0; i < postorder_num; ++i)
2131 {
2132 gimple_stmt_iterator gsi;
2133 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2134
2135 /* Apply forward propagation to all stmts in the basic-block.
2136 Note we update GSI within the loop as necessary. */
2137 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2138 {
2139 gimple stmt = gsi_stmt (gsi);
2140 tree lhs, rhs;
2141 enum tree_code code;
2142
2143 if (!is_gimple_assign (stmt))
2144 {
2145 gsi_next (&gsi);
2146 continue;
2147 }
2148
2149 lhs = gimple_assign_lhs (stmt);
2150 rhs = gimple_assign_rhs1 (stmt);
2151 code = gimple_assign_rhs_code (stmt);
2152 if (TREE_CODE (lhs) != SSA_NAME
2153 || has_zero_uses (lhs))
2154 {
2155 gsi_next (&gsi);
2156 continue;
2157 }
2158
2159 /* If this statement sets an SSA_NAME to an address,
2160 try to propagate the address into the uses of the SSA_NAME. */
2161 if (code == ADDR_EXPR
2162 /* Handle pointer conversions on invariant addresses
2163 as well, as this is valid gimple. */
2164 || (CONVERT_EXPR_CODE_P (code)
2165 && TREE_CODE (rhs) == ADDR_EXPR
2166 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2167 {
2168 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2169 if ((!base
2170 || !DECL_P (base)
2171 || decl_address_invariant_p (base))
2172 && !stmt_references_abnormal_ssa_name (stmt)
2173 && forward_propagate_addr_expr (lhs, rhs, true))
2174 {
2175 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2176 release_defs (stmt);
2177 gsi_remove (&gsi, true);
2178 }
2179 else
2180 gsi_next (&gsi);
2181 }
2182 else if (code == POINTER_PLUS_EXPR)
2183 {
2184 tree off = gimple_assign_rhs2 (stmt);
2185 if (TREE_CODE (off) == INTEGER_CST
2186 && can_propagate_from (stmt)
2187 && !simple_iv_increment_p (stmt)
2188 /* ??? Better adjust the interface to that function
2189 instead of building new trees here. */
2190 && forward_propagate_addr_expr
2191 (lhs,
2192 build1_loc (gimple_location (stmt),
2193 ADDR_EXPR, TREE_TYPE (rhs),
2194 fold_build2 (MEM_REF,
2195 TREE_TYPE (TREE_TYPE (rhs)),
2196 rhs,
2197 fold_convert (ptr_type_node,
2198 off))), true))
2199 {
2200 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2201 release_defs (stmt);
2202 gsi_remove (&gsi, true);
2203 }
2204 else if (is_gimple_min_invariant (rhs))
2205 {
2206 /* Make sure to fold &a[0] + off_1 here. */
2207 fold_stmt_inplace (&gsi);
2208 update_stmt (stmt);
2209 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2210 gsi_next (&gsi);
2211 }
2212 else
2213 gsi_next (&gsi);
2214 }
2215 else
2216 gsi_next (&gsi);
2217 }
2218
2219 /* Combine stmts with the stmts defining their operands.
2220 Note we update GSI within the loop as necessary. */
2221 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2222 {
2223 gimple stmt = gsi_stmt (gsi);
2224 gimple orig_stmt = stmt;
2225 bool changed = false;
2226
2227 /* Mark stmt as potentially needing revisiting. */
2228 gimple_set_plf (stmt, GF_PLF_1, false);
2229
2230 if (fold_stmt (&gsi, fwprop_ssa_val))
2231 {
2232 changed = true;
2233 stmt = gsi_stmt (gsi);
2234 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2235 bitmap_set_bit (to_purge, bb->index);
2236 /* Cleanup the CFG if we simplified a condition to
2237 true or false. */
2238 if (gimple_code (stmt) == GIMPLE_COND
2239 && (gimple_cond_true_p (stmt)
2240 || gimple_cond_false_p (stmt)))
2241 cfg_changed = true;
2242 update_stmt (stmt);
2243 }
2244
2245 switch (gimple_code (stmt))
2246 {
2247 case GIMPLE_ASSIGN:
2248 {
2249 tree rhs1 = gimple_assign_rhs1 (stmt);
2250 enum tree_code code = gimple_assign_rhs_code (stmt);
2251
2252 if (code == COND_EXPR
2253 || code == VEC_COND_EXPR)
2254 {
2255 /* In this case the entire COND_EXPR is in rhs1. */
2256 if (forward_propagate_into_cond (&gsi))
2257 {
2258 changed = true;
2259 stmt = gsi_stmt (gsi);
2260 }
2261 }
2262 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2263 {
2264 int did_something;
2265 did_something = forward_propagate_into_comparison (&gsi);
2266 if (did_something == 2)
2267 cfg_changed = true;
2268 changed = did_something != 0;
2269 }
2270 else if ((code == PLUS_EXPR
2271 || code == BIT_IOR_EXPR
2272 || code == BIT_XOR_EXPR)
2273 && simplify_rotate (&gsi))
2274 changed = true;
2275 else if (code == VEC_PERM_EXPR)
2276 {
2277 int did_something = simplify_permutation (&gsi);
2278 if (did_something == 2)
2279 cfg_changed = true;
2280 changed = did_something != 0;
2281 }
2282 else if (code == BIT_FIELD_REF)
2283 changed = simplify_bitfield_ref (&gsi);
2284 else if (code == CONSTRUCTOR
2285 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2286 changed = simplify_vector_constructor (&gsi);
2287 break;
2288 }
2289
2290 case GIMPLE_SWITCH:
2291 changed = simplify_gimple_switch (stmt);
2292 break;
2293
2294 case GIMPLE_COND:
2295 {
2296 int did_something;
2297 did_something = forward_propagate_into_gimple_cond (stmt);
2298 if (did_something == 2)
2299 cfg_changed = true;
2300 changed = did_something != 0;
2301 break;
2302 }
2303
2304 case GIMPLE_CALL:
2305 {
2306 tree callee = gimple_call_fndecl (stmt);
2307 if (callee != NULL_TREE
2308 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2309 changed = simplify_builtin_call (&gsi, callee);
2310 break;
2311 }
2312
2313 default:;
2314 }
2315
2316 if (changed)
2317 {
2318 /* If the stmt changed then re-visit it and the statements
2319 inserted before it. */
2320 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2321 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2322 break;
2323 if (gsi_end_p (gsi))
2324 gsi = gsi_start_bb (bb);
2325 else
2326 gsi_next (&gsi);
2327 }
2328 else
2329 {
2330 /* Stmt no longer needs to be revisited. */
2331 gimple_set_plf (stmt, GF_PLF_1, true);
2332
2333 /* Fill up the lattice. */
2334 if (gimple_assign_single_p (stmt))
2335 {
2336 tree lhs = gimple_assign_lhs (stmt);
2337 tree rhs = gimple_assign_rhs1 (stmt);
2338 if (TREE_CODE (lhs) == SSA_NAME)
2339 {
2340 tree val = lhs;
2341 if (TREE_CODE (rhs) == SSA_NAME)
2342 val = fwprop_ssa_val (rhs);
2343 else if (is_gimple_min_invariant (rhs))
2344 val = rhs;
2345 fwprop_set_lattice_val (lhs, val);
2346 }
2347 }
2348
2349 gsi_next (&gsi);
2350 }
2351 }
2352 }
2353 free (postorder);
2354 lattice.release ();
2355
2356 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2357 BITMAP_FREE (to_purge);
2358
2359 if (cfg_changed)
2360 todoflags |= TODO_cleanup_cfg;
2361
2362 return todoflags;
2363 }
2364
2365 } // anon namespace
2366
2367 gimple_opt_pass *
2368 make_pass_forwprop (gcc::context *ctxt)
2369 {
2370 return new pass_forwprop (ctxt);
2371 }