291d763b 1/* Forward propagation of expressions for single use variables.
3aea1f79 2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4ee9c684 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
4ee9c684 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
4ee9c684 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
4ee9c684 24#include "tree.h"
9ed99284 25#include "stor-layout.h"
4ee9c684 26#include "tm_p.h"
94ea8568 27#include "predict.h"
28#include "vec.h"
29#include "hashtab.h"
30#include "hash-set.h"
31#include "machmode.h"
32#include "hard-reg-set.h"
33#include "input.h"
34#include "function.h"
35#include "dominance.h"
36#include "cfg.h"
4ee9c684 37#include "basic-block.h"
e5b1e080 38#include "gimple-pretty-print.h"
bc61cadb 39#include "tree-ssa-alias.h"
40#include "internal-fn.h"
41#include "gimple-fold.h"
42#include "tree-eh.h"
43#include "gimple-expr.h"
44#include "is-a.h"
073c1fd5 45#include "gimple.h"
a8783bee 46#include "gimplify.h"
dcf1a1ec 47#include "gimple-iterator.h"
e795d6e1 48#include "gimplify-me.h"
073c1fd5 49#include "gimple-ssa.h"
50#include "tree-cfg.h"
51#include "tree-phinodes.h"
52#include "ssa-iterators.h"
9ed99284 53#include "stringpool.h"
073c1fd5 54#include "tree-ssanames.h"
9ed99284 55#include "expr.h"
073c1fd5 56#include "tree-dfa.h"
4ee9c684 57#include "tree-pass.h"
291d763b 58#include "langhooks.h"
5adc1066 59#include "flags.h"
8f79c655 60#include "diagnostic.h"
27f931ff 61#include "expr.h"
6b42039a 62#include "cfgloop.h"
34517c64 63#include "insn-codes.h"
d1938a4b 64#include "optabs.h"
58bf5219 65#include "tree-ssa-propagate.h"
424a4a92 66#include "tree-ssa-dom.h"
f7715905 67#include "builtins.h"
f619ecae 68#include "tree-cfgcleanup.h"
69#include "tree-into-ssa.h"
94ea8568 70#include "cfganal.h"
4ee9c684 71
291d763b 72/* This pass propagates the RHS of assignment statements into use
73 sites of the LHS of the assignment. It's basically a specialized
8f628ee8 74 form of tree combination. It is hoped all of this can disappear
75 when we have a generalized tree combiner.
4ee9c684 76
291d763b 77 One class of common cases we handle is forward propagating a single use
48e1416a 78 variable into a COND_EXPR.
4ee9c684 79
80 bb0:
81 x = a COND b;
82 if (x) goto ... else goto ...
83
84 Will be transformed into:
85
86 bb0:
87 if (a COND b) goto ... else goto ...
48e1416a 88
4ee9c684 89 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
90
91 Or (assuming c1 and c2 are constants):
92
93 bb0:
48e1416a 94 x = a + c1;
4ee9c684 95 if (x EQ/NEQ c2) goto ... else goto ...
96
97 Will be transformed into:
98
99 bb0:
100 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
101
102 Similarly for x = a - c1.
48e1416a 103
4ee9c684 104 Or
105
106 bb0:
107 x = !a
108 if (x) goto ... else goto ...
109
110 Will be transformed into:
111
112 bb0:
113 if (a == 0) goto ... else goto ...
114
115 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
116 For these cases, we propagate A into all, possibly more than one,
117 COND_EXPRs that use X.
118
f5c8cff5 119 Or
120
121 bb0:
122 x = (typecast) a
123 if (x) goto ... else goto ...
124
125 Will be transformed into:
126
127 bb0:
128 if (a != 0) goto ... else goto ...
129
 130 (Assuming a has an integral type and x is a boolean, or x has an
 131 integral type and a is a boolean.)
132
133 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
134 For these cases, we propagate A into all, possibly more than one,
135 COND_EXPRs that use X.
136
4ee9c684 137 In addition to eliminating the variable and the statement which assigns
138 a value to the variable, we may be able to later thread the jump without
e6dfde59 139 adding insane complexity in the dominator optimizer.
4ee9c684 140
f5c8cff5 141 Also note these transformations can cascade. We handle this by having
142 a worklist of COND_EXPR statements to examine. As we make a change to
143 a statement, we put it back on the worklist to examine on the next
144 iteration of the main loop.
145
291d763b 146 A second class of propagation opportunities arises for ADDR_EXPR
147 nodes.
148
149 ptr = &x->y->z;
150 res = *ptr;
151
152 Will get turned into
153
154 res = x->y->z;
155
50f39ec6 156 Or
157 ptr = (type1*)&type2var;
158 res = *ptr
159
160 Will get turned into (if type1 and type2 are the same size
161 and neither have volatile on them):
162 res = VIEW_CONVERT_EXPR<type1>(type2var)
163
291d763b 164 Or
165
166 ptr = &x[0];
167 ptr2 = ptr + <constant>;
168
169 Will get turned into
170
171 ptr2 = &x[constant/elementsize];
172
173 Or
174
175 ptr = &x[0];
176 offset = index * element_size;
177 offset_p = (pointer) offset;
178 ptr2 = ptr + offset_p
179
180 Will get turned into:
181
182 ptr2 = &x[index];
183
1c4607fd 184 Or
185 ssa = (int) decl
186 res = ssa & 1
187
188 Provided that decl has known alignment >= 2, will get turned into
189
190 res = 0
191
8f628ee8 192 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
 193 allow us to remove the cast, and we propagate a {NOT_EXPR,NEG_EXPR}
 194 into a subsequent {NOT_EXPR,NEG_EXPR}.
291d763b 195
4ee9c684 196 This will (of course) be extended as other needs arise. */
197
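/* As an illustrative sketch (the function and variable names below are
   hypothetical), source code such as

     int f (int a, int b)
     {
       int x = a < b;
       if (x)
         return 1;
       return 0;
     }

   gimplifies into the single-use "x = a COND b; if (x)" pattern shown
   above, which this pass turns into a direct test of "a < b", leaving
   the assignment to X dead.  */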
bfb89138 198static bool forward_propagate_addr_expr (tree, tree, bool);
148aa112 199
b59e1c90 200/* Set to true if we delete dead edges during the optimization. */
148aa112 201static bool cfg_changed;
202
75a70cf9 203static tree rhs_to_tree (tree type, gimple stmt);
148aa112 204
83a20baf 205/* Get the next statement we can propagate NAME's value into, skipping
5adc1066 206 trivial copies. Returns the statement that is suitable as a
 207 propagation destination or NULL if there is no such statement.
 208 This only returns destinations in a single-use chain. If FINAL_NAME_P
 209 is non-NULL, the SSA name that represents the use is written to it. */
a3451973 210
75a70cf9 211static gimple
5adc1066 212get_prop_dest_stmt (tree name, tree *final_name_p)
a3451973 213{
5adc1066 214 use_operand_p use;
75a70cf9 215 gimple use_stmt;
a3451973 216
5adc1066 217 do {
218 /* If name has multiple uses, bail out. */
219 if (!single_imm_use (name, &use, &use_stmt))
75a70cf9 220 return NULL;
a3451973 221
5adc1066 222 /* If this is not a trivial copy, we found it. */
8f0b877f 223 if (!gimple_assign_ssa_name_copy_p (use_stmt)
75a70cf9 224 || gimple_assign_rhs1 (use_stmt) != name)
5adc1066 225 break;
226
227 /* Continue searching uses of the copy destination. */
75a70cf9 228 name = gimple_assign_lhs (use_stmt);
5adc1066 229 } while (1);
230
231 if (final_name_p)
232 *final_name_p = name;
233
234 return use_stmt;
a3451973 235}
236
5adc1066 237/* Get the statement we can propagate from into NAME, skipping
 238 trivial copies. Returns the statement which defines the
 239 propagation source or NULL if there is no such statement.
 240 If SINGLE_USE_ONLY is set, consider only sources which have
 241 a single-use chain up to NAME. If SINGLE_USE_P is non-null,
 242 it is set to whether the chain to NAME is a single-use chain
 243 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
4ee9c684 244
75a70cf9 245static gimple
5adc1066 246get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
f5c8cff5 247{
5adc1066 248 bool single_use = true;
249
250 do {
75a70cf9 251 gimple def_stmt = SSA_NAME_DEF_STMT (name);
5adc1066 252
253 if (!has_single_use (name))
254 {
255 single_use = false;
256 if (single_use_only)
75a70cf9 257 return NULL;
5adc1066 258 }
259
260 /* If name is defined by a PHI node or is the default def, bail out. */
8f0b877f 261 if (!is_gimple_assign (def_stmt))
75a70cf9 262 return NULL;
5adc1066 263
ab31ca23 264 /* If def_stmt is a simple copy, continue looking. */
265 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
266 name = gimple_assign_rhs1 (def_stmt);
267 else
5adc1066 268 {
269 if (!single_use_only && single_use_p)
270 *single_use_p = single_use;
271
ab31ca23 272 return def_stmt;
5adc1066 273 }
5adc1066 274 } while (1);
275}
e6dfde59 276
5adc1066 277/* Checks whether the destination ssa name in DEF_STMT can be used as
 278 a propagation source. Returns true if so, otherwise false. */
e6dfde59 279
5adc1066 280static bool
75a70cf9 281can_propagate_from (gimple def_stmt)
5adc1066 282{
75a70cf9 283 gcc_assert (is_gimple_assign (def_stmt));
8f0b877f 284
484b827b 285 /* If the rhs has side-effects we cannot propagate from it. */
75a70cf9 286 if (gimple_has_volatile_ops (def_stmt))
484b827b 287 return false;
288
289 /* If the rhs is a load we cannot propagate from it. */
75a70cf9 290 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
291 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
484b827b 292 return false;
293
b9e98b8a 294 /* Constants can always be propagated. */
8f0b877f 295 if (gimple_assign_single_p (def_stmt)
296 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
b9e98b8a 297 return true;
298
75a70cf9 299 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
32cdcc42 300 if (stmt_references_abnormal_ssa_name (def_stmt))
301 return false;
4ee9c684 302
5adc1066 303 /* If the definition is a conversion of a pointer to a function type,
75a70cf9 304 then we cannot apply optimizations, as some targets require
305 function pointers to be canonicalized and in this case this
306 optimization could eliminate a necessary canonicalization. */
8f0b877f 307 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
75a70cf9 308 {
309 tree rhs = gimple_assign_rhs1 (def_stmt);
310 if (POINTER_TYPE_P (TREE_TYPE (rhs))
311 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
312 return false;
313 }
8f0b877f 314
5adc1066 315 return true;
e6dfde59 316}
317
ff0739e0 318/* Remove a chain of dead statements starting at the definition of
319 NAME. The chain is linked via the first operand of the defining statements.
5d2361b0 320 If NAME was replaced in its only use then this function can be used
ff0739e0 321 to clean up dead stmts. The function handles already released SSA
322 names gracefully.
323 Returns true if cleanup-cfg has to run. */
8f628ee8 324
5adc1066 325static bool
5d2361b0 326remove_prop_source_from_use (tree name)
5adc1066 327{
75a70cf9 328 gimple_stmt_iterator gsi;
329 gimple stmt;
5d2361b0 330 bool cfg_changed = false;
8f628ee8 331
5adc1066 332 do {
5d2361b0 333 basic_block bb;
334
ff0739e0 335 if (SSA_NAME_IN_FREE_LIST (name)
336 || SSA_NAME_IS_DEFAULT_DEF (name)
337 || !has_zero_uses (name))
5d2361b0 338 return cfg_changed;
8f628ee8 339
5adc1066 340 stmt = SSA_NAME_DEF_STMT (name);
ff0739e0 341 if (gimple_code (stmt) == GIMPLE_PHI
342 || gimple_has_side_effects (stmt))
6f9714b3 343 return cfg_changed;
ff0739e0 344
345 bb = gimple_bb (stmt);
6f9714b3 346 gsi = gsi_for_stmt (stmt);
ff0739e0 347 unlink_stmt_vdef (stmt);
13ff78a4 348 if (gsi_remove (&gsi, true))
349 cfg_changed |= gimple_purge_dead_eh_edges (bb);
ff0739e0 350 release_defs (stmt);
8f628ee8 351
ff0739e0 352 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
75a70cf9 353 } while (name && TREE_CODE (name) == SSA_NAME);
8f628ee8 354
5d2361b0 355 return cfg_changed;
5adc1066 356}
8f628ee8 357
75a70cf9 358/* Return the rhs of a gimple_assign STMT in the form of a single tree,
359 converted to type TYPE.
48e1416a 360
75a70cf9 361 This should disappear, but is needed so we can combine expressions and use
362 the fold() interfaces. Long term, we need to develop folding and combine
 363 routines that deal with gimple exclusively. */
364
365static tree
366rhs_to_tree (tree type, gimple stmt)
367{
389dd41b 368 location_t loc = gimple_location (stmt);
75a70cf9 369 enum tree_code code = gimple_assign_rhs_code (stmt);
57c45d70 370 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
371 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
372 gimple_assign_rhs2 (stmt),
373 gimple_assign_rhs3 (stmt));
374 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
389dd41b 375 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
fb8ed03f 376 gimple_assign_rhs2 (stmt));
75a70cf9 377 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
fb8ed03f 378 return build1 (code, type, gimple_assign_rhs1 (stmt));
75a70cf9 379 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
380 return gimple_assign_rhs1 (stmt);
381 else
382 gcc_unreachable ();
383}
384
5adc1066 385/* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
386 the folded result in a form suitable for COND_EXPR_COND or
387 NULL_TREE, if there is no suitable simplified form. If
388 INVARIANT_ONLY is true only gimple_min_invariant results are
389 considered simplified. */
8f628ee8 390
391static tree
c73fee76 392combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
5adc1066 393 tree op0, tree op1, bool invariant_only)
8f628ee8 394{
5adc1066 395 tree t;
8f628ee8 396
5adc1066 397 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
8f628ee8 398
c73fee76 399 fold_defer_overflow_warnings ();
400 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
5adc1066 401 if (!t)
c73fee76 402 {
403 fold_undefer_overflow_warnings (false, NULL, 0);
404 return NULL_TREE;
405 }
8f628ee8 406
5adc1066 407 /* Require that we got a boolean type out if we put one in. */
408 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
8f628ee8 409
a7392604 410 /* Canonicalize the combined condition for use in a COND_EXPR. */
411 t = canonicalize_cond_expr_cond (t);
8f628ee8 412
5adc1066 413 /* Bail out if we required an invariant but didn't get one. */
75a70cf9 414 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
c73fee76 415 {
416 fold_undefer_overflow_warnings (false, NULL, 0);
417 return NULL_TREE;
418 }
419
420 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
8f628ee8 421
a7392604 422 return t;
8f628ee8 423}
424
c8126d25 425/* Combine the comparison OP0 CODE OP1 at the location of STMT with the
 426 defining statements of its operands. Return a new comparison tree or
 427 NULL_TREE if there were no simplifying combines. */
428
429static tree
c73fee76 430forward_propagate_into_comparison_1 (gimple stmt,
678b2f5b 431 enum tree_code code, tree type,
432 tree op0, tree op1)
c8126d25 433{
434 tree tmp = NULL_TREE;
435 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
436 bool single_use0_p = false, single_use1_p = false;
437
438 /* For comparisons use the first operand, that is likely to
439 simplify comparisons against constants. */
440 if (TREE_CODE (op0) == SSA_NAME)
441 {
442 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
443 if (def_stmt && can_propagate_from (def_stmt))
444 {
445 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
c73fee76 446 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 447 rhs0, op1, !single_use0_p);
448 if (tmp)
449 return tmp;
450 }
451 }
452
453 /* If that wasn't successful, try the second operand. */
454 if (TREE_CODE (op1) == SSA_NAME)
455 {
456 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
457 if (def_stmt && can_propagate_from (def_stmt))
458 {
459 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
c73fee76 460 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 461 op0, rhs1, !single_use1_p);
462 if (tmp)
463 return tmp;
464 }
465 }
466
467 /* If that wasn't successful either, try both operands. */
468 if (rhs0 != NULL_TREE
469 && rhs1 != NULL_TREE)
c73fee76 470 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 471 rhs0, rhs1,
472 !(single_use0_p && single_use1_p));
473
474 return tmp;
475}
476
678b2f5b 477/* Propagate from the ssa name definition statements of the operands into
 478 the comparison on the rhs of the assignment at *GSI if that simplifies it.
6f9714b3 479 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
480 otherwise returns 0. */
c8126d25 481
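/* As an illustrative sketch (assuming x_1 has no other uses):

     x_1 = a_2 + 1;
     b_3 = x_1 == 3;

   is simplified into

     b_3 = a_2 == 2;

   which is the "x = a + c1; if (x EQ/NEQ c2)" case from the file header,
   applied to a comparison on the rhs of an assignment.  */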
6f9714b3 482static int
678b2f5b 483forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
c8126d25 484{
678b2f5b 485 gimple stmt = gsi_stmt (*gsi);
486 tree tmp;
6f9714b3 487 bool cfg_changed = false;
56632de0 488 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
6f9714b3 489 tree rhs1 = gimple_assign_rhs1 (stmt);
490 tree rhs2 = gimple_assign_rhs2 (stmt);
c8126d25 491
492 /* Combine the comparison with defining statements. */
c73fee76 493 tmp = forward_propagate_into_comparison_1 (stmt,
678b2f5b 494 gimple_assign_rhs_code (stmt),
56632de0 495 type, rhs1, rhs2);
496 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
c8126d25 497 {
678b2f5b 498 gimple_assign_set_rhs_from_tree (gsi, tmp);
50aacf4c 499 fold_stmt (gsi);
500 update_stmt (gsi_stmt (*gsi));
75200312 501
6f9714b3 502 if (TREE_CODE (rhs1) == SSA_NAME)
503 cfg_changed |= remove_prop_source_from_use (rhs1);
504 if (TREE_CODE (rhs2) == SSA_NAME)
505 cfg_changed |= remove_prop_source_from_use (rhs2);
506 return cfg_changed ? 2 : 1;
c8126d25 507 }
508
6f9714b3 509 return 0;
c8126d25 510}
511
5adc1066 512/* Propagate from the ssa name definition statements of the condition
75a70cf9 513 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
514 Returns zero if no statement was changed, one if there were
515 changes and two if cfg_cleanup needs to run.
48e1416a 516
75a70cf9 517 This must be kept in sync with forward_propagate_into_cond. */
518
519static int
520forward_propagate_into_gimple_cond (gimple stmt)
521{
678b2f5b 522 tree tmp;
523 enum tree_code code = gimple_cond_code (stmt);
6f9714b3 524 bool cfg_changed = false;
525 tree rhs1 = gimple_cond_lhs (stmt);
526 tree rhs2 = gimple_cond_rhs (stmt);
678b2f5b 527
528 /* We can do tree combining on SSA_NAME and comparison expressions. */
529 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
530 return 0;
531
c73fee76 532 tmp = forward_propagate_into_comparison_1 (stmt, code,
678b2f5b 533 boolean_type_node,
6f9714b3 534 rhs1, rhs2);
678b2f5b 535 if (tmp)
536 {
537 if (dump_file && tmp)
538 {
678b2f5b 539 fprintf (dump_file, " Replaced '");
6f9714b3 540 print_gimple_expr (dump_file, stmt, 0, 0);
678b2f5b 541 fprintf (dump_file, "' with '");
542 print_generic_expr (dump_file, tmp, 0);
543 fprintf (dump_file, "'\n");
544 }
75a70cf9 545
678b2f5b 546 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
547 update_stmt (stmt);
75a70cf9 548
6f9714b3 549 if (TREE_CODE (rhs1) == SSA_NAME)
550 cfg_changed |= remove_prop_source_from_use (rhs1);
551 if (TREE_CODE (rhs2) == SSA_NAME)
552 cfg_changed |= remove_prop_source_from_use (rhs2);
553 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
678b2f5b 554 }
75a70cf9 555
10a6edd6 556 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
557 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
558 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
559 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
560 && ((code == EQ_EXPR
561 && integer_zerop (rhs2))
562 || (code == NE_EXPR
563 && integer_onep (rhs2))))
564 {
565 basic_block bb = gimple_bb (stmt);
566 gimple_cond_set_code (stmt, NE_EXPR);
567 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
568 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
569 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
570 return 1;
571 }
572
6f9714b3 573 return 0;
75a70cf9 574}
575
576
577/* Propagate from the ssa name definition statements of COND_EXPR
578 in the rhs of statement STMT into the conditional if that simplifies it.
8a2caf10 579 Returns true if the stmt was changed. */
4ee9c684 580
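/* As an illustrative sketch (assuming x_1 has no other uses):

     x_1 = a_2 < b_3;
     r_4 = x_1 ? c_5 : d_6;

   is rewritten into

     r_4 = a_2 < b_3 ? c_5 : d_6;

   while a defining BIT_NOT_EXPR of a 1-bit value or a BIT_XOR_EXPR with 1
   instead causes the arms of the COND_EXPR to be swapped.  */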
8a2caf10 581static bool
75a70cf9 582forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
e6dfde59 583{
75a70cf9 584 gimple stmt = gsi_stmt (*gsi_p);
678b2f5b 585 tree tmp = NULL_TREE;
586 tree cond = gimple_assign_rhs1 (stmt);
def3cb70 587 enum tree_code code = gimple_assign_rhs_code (stmt);
10a6edd6 588 bool swap = false;
d080be9e 589
678b2f5b 590 /* We can do tree combining on SSA_NAME and comparison expressions. */
591 if (COMPARISON_CLASS_P (cond))
c73fee76 592 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
f2c1848b 593 TREE_TYPE (cond),
c8126d25 594 TREE_OPERAND (cond, 0),
595 TREE_OPERAND (cond, 1));
678b2f5b 596 else if (TREE_CODE (cond) == SSA_NAME)
597 {
def3cb70 598 enum tree_code def_code;
8a2caf10 599 tree name = cond;
678b2f5b 600 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
601 if (!def_stmt || !can_propagate_from (def_stmt))
6f9714b3 602 return 0;
5adc1066 603
def3cb70 604 def_code = gimple_assign_rhs_code (def_stmt);
605 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
8a2caf10 606 tmp = fold_build2_loc (gimple_location (def_stmt),
def3cb70 607 def_code,
bc112f18 608 TREE_TYPE (cond),
8a2caf10 609 gimple_assign_rhs1 (def_stmt),
610 gimple_assign_rhs2 (def_stmt));
def3cb70 611 else if (code == COND_EXPR
612 && ((def_code == BIT_NOT_EXPR
613 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
614 || (def_code == BIT_XOR_EXPR
615 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
10a6edd6 616 {
617 tmp = gimple_assign_rhs1 (def_stmt);
618 swap = true;
619 }
678b2f5b 620 }
5adc1066 621
25f48be0 622 if (tmp
623 && is_gimple_condexpr (tmp))
678b2f5b 624 {
625 if (dump_file && tmp)
626 {
627 fprintf (dump_file, " Replaced '");
628 print_generic_expr (dump_file, cond, 0);
629 fprintf (dump_file, "' with '");
630 print_generic_expr (dump_file, tmp, 0);
631 fprintf (dump_file, "'\n");
632 }
d080be9e 633
def3cb70 634 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
635 : integer_onep (tmp))
8a2caf10 636 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
637 else if (integer_zerop (tmp))
638 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
639 else
10a6edd6 640 {
641 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
642 if (swap)
643 {
644 tree t = gimple_assign_rhs2 (stmt);
645 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
646 gimple_assign_set_rhs3 (stmt, t);
647 }
648 }
678b2f5b 649 stmt = gsi_stmt (*gsi_p);
650 update_stmt (stmt);
5adc1066 651
8a2caf10 652 return true;
678b2f5b 653 }
d080be9e 654
6f9714b3 655 return 0;
4ee9c684 656}
657
360b78f3 658/* Propagate from the ssa name definition statements of COND_EXPR
659 values in the rhs of statement STMT into the conditional arms
660 if that simplifies it.
661 Returns true if the stmt was changed. */
662
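/* As an illustrative sketch, with a condition c_2 shared by both
   statements:

     a_1 = c_2 ? x_3 : y_4;
     b_5 = c_2 ? a_1 : z_6;

   the second statement becomes

     b_5 = c_2 ? x_3 : z_6;

   and if both arms end up equal the COND_EXPR collapses into a copy.  */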
663static bool
664combine_cond_exprs (gimple_stmt_iterator *gsi_p)
665{
666 gimple stmt = gsi_stmt (*gsi_p);
667 tree cond, val1, val2;
668 bool changed = false;
669
670 cond = gimple_assign_rhs1 (stmt);
671 val1 = gimple_assign_rhs2 (stmt);
672 if (TREE_CODE (val1) == SSA_NAME)
673 {
674 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
675 if (is_gimple_assign (def_stmt)
676 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
677 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
678 {
679 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
680 gimple_assign_set_rhs2 (stmt, val1);
681 changed = true;
682 }
683 }
684 val2 = gimple_assign_rhs3 (stmt);
685 if (TREE_CODE (val2) == SSA_NAME)
686 {
687 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
688 if (is_gimple_assign (def_stmt)
689 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
690 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
691 {
692 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
693 gimple_assign_set_rhs3 (stmt, val2);
694 changed = true;
695 }
696 }
697 if (operand_equal_p (val1, val2, 0))
698 {
699 gimple_assign_set_rhs_from_tree (gsi_p, val1);
700 stmt = gsi_stmt (*gsi_p);
701 changed = true;
702 }
703
704 if (changed)
705 update_stmt (stmt);
706
707 return changed;
708}
709
48e1416a 710/* We've just substituted an ADDR_EXPR into stmt. Update all the
148aa112 711 relevant data structures to match. */
712
713static void
75a70cf9 714tidy_after_forward_propagate_addr (gimple stmt)
148aa112 715{
148aa112 716 /* We may have turned a trapping insn into a non-trapping insn. */
717 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
75a70cf9 718 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
148aa112 719 cfg_changed = true;
f2fae51f 720
75a70cf9 721 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
722 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
148aa112 723}
724
15ec875c 725/* NAME is a SSA_NAME representing DEF_RHS which is of the form
726 ADDR_EXPR <whatever>.
291d763b 727
3d5cfe81 728 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
291d763b 729 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
3d5cfe81 730 node or for recovery of array indexing from pointer arithmetic.
75a70cf9 731
6b5a5c42 732 Return true if the propagation was successful (the propagation may
 733 be only partially successful, yet things may have been changed). */
291d763b 734
735static bool
75a70cf9 736forward_propagate_addr_expr_1 (tree name, tree def_rhs,
737 gimple_stmt_iterator *use_stmt_gsi,
6776dec8 738 bool single_use_p)
291d763b 739{
75a70cf9 740 tree lhs, rhs, rhs2, array_ref;
75a70cf9 741 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
742 enum tree_code rhs_code;
9e019299 743 bool res = true;
291d763b 744
971c637a 745 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
291d763b 746
75a70cf9 747 lhs = gimple_assign_lhs (use_stmt);
748 rhs_code = gimple_assign_rhs_code (use_stmt);
749 rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 750
bfb89138 751 /* Do not perform copy-propagation but recurse through copy chains. */
752 if (TREE_CODE (lhs) == SSA_NAME
753 && rhs_code == SSA_NAME)
754 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
755
756 /* The use statement could be a conversion. Recurse to the uses of the
757 lhs as copyprop does not copy through pointer to integer to pointer
758 conversions and FRE does not catch all cases either.
759 Treat the case of a single-use name and
6776dec8 760 a conversion to def_rhs type separate, though. */
971c637a 761 if (TREE_CODE (lhs) == SSA_NAME
bfb89138 762 && CONVERT_EXPR_CODE_P (rhs_code))
6776dec8 763 {
bfb89138 764 /* If there is a point in a conversion chain where the types match,
 765 so we can remove a conversion, re-materialize the address here
766 and stop. */
767 if (single_use_p
768 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
769 {
770 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
771 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
772 return true;
773 }
774
775 /* Else recurse if the conversion preserves the address value. */
776 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
777 || POINTER_TYPE_P (TREE_TYPE (lhs)))
778 && (TYPE_PRECISION (TREE_TYPE (lhs))
779 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
780 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
781
782 return false;
6776dec8 783 }
971c637a 784
bfb89138 785 /* If this isn't a conversion chain, from here on we can only propagate
 786 into compatible pointer contexts. */
787 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
788 return false;
789
182cf5a9 790 /* Propagate through constant pointer adjustments. */
791 if (TREE_CODE (lhs) == SSA_NAME
792 && rhs_code == POINTER_PLUS_EXPR
793 && rhs == name
794 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
795 {
796 tree new_def_rhs;
797 /* As we come here with non-invariant addresses in def_rhs we need
798 to make sure we can build a valid constant offsetted address
799 for further propagation. Simply rely on fold building that
800 and check after the fact. */
801 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
802 def_rhs,
803 fold_convert (ptr_type_node,
804 gimple_assign_rhs2 (use_stmt)));
805 if (TREE_CODE (new_def_rhs) == MEM_REF
f5d03f27 806 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
182cf5a9 807 return false;
808 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
809 TREE_TYPE (rhs));
810
811 /* Recurse. If we could propagate into all uses of lhs do not
812 bother to replace into the current use but just pretend we did. */
813 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
bfb89138 814 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
182cf5a9 815 return true;
816
817 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
818 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
819 new_def_rhs, NULL_TREE);
820 else if (is_gimple_min_invariant (new_def_rhs))
821 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
822 new_def_rhs, NULL_TREE);
823 else
824 return false;
825 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
826 update_stmt (use_stmt);
827 return true;
828 }
829
48e1416a 830 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
971c637a 831 ADDR_EXPR will not appear on the LHS. */
d0d1ecb8 832 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
833 while (handled_component_p (*lhsp))
834 lhsp = &TREE_OPERAND (*lhsp, 0);
835 lhs = *lhsp;
971c637a 836
182cf5a9 837 /* Now see if the LHS node is a MEM_REF using NAME. If so,
971c637a 838 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 839 if (TREE_CODE (lhs) == MEM_REF
9e019299 840 && TREE_OPERAND (lhs, 0) == name)
971c637a 841 {
182cf5a9 842 tree def_rhs_base;
843 HOST_WIDE_INT def_rhs_offset;
844 /* If the address is invariant we can always fold it. */
845 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
846 &def_rhs_offset)))
9e019299 847 {
5de9d3ed 848 offset_int off = mem_ref_offset (lhs);
182cf5a9 849 tree new_ptr;
e913b5cd 850 off += def_rhs_offset;
182cf5a9 851 if (TREE_CODE (def_rhs_base) == MEM_REF)
852 {
cf8f0e63 853 off += mem_ref_offset (def_rhs_base);
182cf5a9 854 new_ptr = TREE_OPERAND (def_rhs_base, 0);
855 }
856 else
857 new_ptr = build_fold_addr_expr (def_rhs_base);
858 TREE_OPERAND (lhs, 0) = new_ptr;
859 TREE_OPERAND (lhs, 1)
e913b5cd 860 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
9e019299 861 tidy_after_forward_propagate_addr (use_stmt);
9e019299 862 /* Continue propagating into the RHS if this was not the only use. */
863 if (single_use_p)
864 return true;
865 }
182cf5a9 866 /* If the LHS is a plain dereference and the value type is the same as
867 that of the pointed-to type of the address we can put the
868 dereferenced address on the LHS preserving the original alias-type. */
d0d1ecb8 869 else if (integer_zerop (TREE_OPERAND (lhs, 1))
870 && ((gimple_assign_lhs (use_stmt) == lhs
871 && useless_type_conversion_p
872 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
873 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
874 || types_compatible_p (TREE_TYPE (lhs),
875 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
f6e2e4ff 876 /* Don't forward anything into clobber stmts if it would result
877 in the lhs no longer being a MEM_REF. */
878 && (!gimple_clobber_p (use_stmt)
879 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
182cf5a9 880 {
881 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 882 tree new_offset, new_base, saved, new_lhs;
182cf5a9 883 while (handled_component_p (*def_rhs_basep))
884 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
885 saved = *def_rhs_basep;
886 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
887 {
888 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 889 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
890 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 891 }
892 else
893 {
894 new_base = build_fold_addr_expr (*def_rhs_basep);
895 new_offset = TREE_OPERAND (lhs, 1);
896 }
897 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
898 new_base, new_offset);
2e5dc41c 899 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 900 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
2e5dc41c 901 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
98d96c6f 902 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 903 *lhsp = new_lhs;
98d96c6f 904 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 905 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
182cf5a9 906 *def_rhs_basep = saved;
907 tidy_after_forward_propagate_addr (use_stmt);
908 /* Continue propagating into the RHS if this was not the
909 only use. */
910 if (single_use_p)
911 return true;
912 }
9e019299 913 else
914 /* We can have a struct assignment dereferencing our name twice.
915 Note that we didn't propagate into the lhs to not falsely
916 claim we did when propagating into the rhs. */
917 res = false;
971c637a 918 }
15ec875c 919
631d5db6 920 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
921 nodes from the RHS. */
d0d1ecb8 922 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
923 if (TREE_CODE (*rhsp) == ADDR_EXPR)
924 rhsp = &TREE_OPERAND (*rhsp, 0);
925 while (handled_component_p (*rhsp))
926 rhsp = &TREE_OPERAND (*rhsp, 0);
927 rhs = *rhsp;
291d763b 928
182cf5a9 929 /* Now see if the RHS node is a MEM_REF using NAME. If so,
291d763b 930 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 931 if (TREE_CODE (rhs) == MEM_REF
932 && TREE_OPERAND (rhs, 0) == name)
291d763b 933 {
182cf5a9 934 tree def_rhs_base;
935 HOST_WIDE_INT def_rhs_offset;
936 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
937 &def_rhs_offset)))
938 {
5de9d3ed 939 offset_int off = mem_ref_offset (rhs);
182cf5a9 940 tree new_ptr;
e913b5cd 941 off += def_rhs_offset;
182cf5a9 942 if (TREE_CODE (def_rhs_base) == MEM_REF)
943 {
cf8f0e63 944 off += mem_ref_offset (def_rhs_base);
182cf5a9 945 new_ptr = TREE_OPERAND (def_rhs_base, 0);
946 }
947 else
948 new_ptr = build_fold_addr_expr (def_rhs_base);
949 TREE_OPERAND (rhs, 0) = new_ptr;
950 TREE_OPERAND (rhs, 1)
e913b5cd 951 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
50aacf4c 952 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 953 tidy_after_forward_propagate_addr (use_stmt);
954 return res;
955 }
2e5dc41c 956 /* If the RHS is a plain dereference and the value type is the same as
182cf5a9 957 that of the pointed-to type of the address we can put the
2e5dc41c 958 dereferenced address on the RHS preserving the original alias-type. */
d0d1ecb8 959 else if (integer_zerop (TREE_OPERAND (rhs, 1))
960 && ((gimple_assign_rhs1 (use_stmt) == rhs
961 && useless_type_conversion_p
962 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
963 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
964 || types_compatible_p (TREE_TYPE (rhs),
965 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
182cf5a9 966 {
967 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 968 tree new_offset, new_base, saved, new_rhs;
182cf5a9 969 while (handled_component_p (*def_rhs_basep))
970 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
971 saved = *def_rhs_basep;
972 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
973 {
974 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 975 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
976 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 977 }
978 else
979 {
980 new_base = build_fold_addr_expr (*def_rhs_basep);
981 new_offset = TREE_OPERAND (rhs, 1);
982 }
983 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
984 new_base, new_offset);
2e5dc41c 985 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 986 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
2e5dc41c 987 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
98d96c6f 988 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 989 *rhsp = new_rhs;
98d96c6f 990 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 991 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
182cf5a9 992 *def_rhs_basep = saved;
50aacf4c 993 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 994 tidy_after_forward_propagate_addr (use_stmt);
995 return res;
996 }
291d763b 997 }
998
971c637a 999 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
1000 is nothing to do. */
75a70cf9 1001 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
1002 || gimple_assign_rhs1 (use_stmt) != name)
971c637a 1003 return false;
1004
291d763b 1005 /* The remaining cases are all for turning pointer arithmetic into
1006 array indexing. They only apply when we have the address of
1007 element zero in an array. If that is not the case then there
1008 is nothing to do. */
15ec875c 1009 array_ref = TREE_OPERAND (def_rhs, 0);
182cf5a9 1010 if ((TREE_CODE (array_ref) != ARRAY_REF
1011 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
1012 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
1013 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
291d763b 1014 return false;
1015
75a70cf9 1016 rhs2 = gimple_assign_rhs2 (use_stmt);
704d7315 1017 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
75a70cf9 1018 if (TREE_CODE (rhs2) == INTEGER_CST)
291d763b 1019 {
704d7315 1020 tree new_rhs = build1_loc (gimple_location (use_stmt),
1021 ADDR_EXPR, TREE_TYPE (def_rhs),
1022 fold_build2 (MEM_REF,
1023 TREE_TYPE (TREE_TYPE (def_rhs)),
1024 unshare_expr (def_rhs),
1025 fold_convert (ptr_type_node,
1026 rhs2)));
1027 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1028 use_stmt = gsi_stmt (*use_stmt_gsi);
1029 update_stmt (use_stmt);
1030 tidy_after_forward_propagate_addr (use_stmt);
1031 return true;
291d763b 1032 }
1033
291d763b 1034 return false;
1035}
1036
3d5cfe81 1037/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1038
1039 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1040 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1041 node or for recovery of array indexing from pointer arithmetic.
bfb89138 1042
 1043 PARENT_SINGLE_USE_P tells whether, in a recursive invocation, NAME was
 1044 the single use in the previous invocation. Pass true when calling
 1045 this as toplevel.
 1046
3d5cfe81 1047 Returns true if all uses have been propagated into. */
1048
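/* As an illustrative sketch (assuming each name is used only as shown):

     p_1 = &a[0];
     q_2 = p_1;
     r_3 = *q_2;

   the address is propagated through the copy q_2 into the dereference,
   giving a direct access to a[0] as in the examples in the file header,
   and the now-dead copy chain is removed afterwards.  */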
1049static bool
bfb89138 1050forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
3d5cfe81 1051{
3d5cfe81 1052 imm_use_iterator iter;
75a70cf9 1053 gimple use_stmt;
3d5cfe81 1054 bool all = true;
bfb89138 1055 bool single_use_p = parent_single_use_p && has_single_use (name);
3d5cfe81 1056
09aca5bc 1057 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
3d5cfe81 1058 {
c96420f8 1059 bool result;
9481f629 1060 tree use_rhs;
3d5cfe81 1061
1062 /* If the use is not in a simple assignment statement, then
1063 there is nothing we can do. */
162efce1 1064 if (!is_gimple_assign (use_stmt))
3d5cfe81 1065 {
688ff29b 1066 if (!is_gimple_debug (use_stmt))
9845d120 1067 all = false;
3d5cfe81 1068 continue;
1069 }
1070
162efce1 1071 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1072 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1073 single_use_p);
1074 /* If the use has moved to a different statement adjust
1075 the update machinery for the old statement too. */
1076 if (use_stmt != gsi_stmt (gsi))
3d5cfe81 1077 {
162efce1 1078 update_stmt (use_stmt);
1079 use_stmt = gsi_stmt (gsi);
3d5cfe81 1080 }
162efce1 1081 update_stmt (use_stmt);
c96420f8 1082 all &= result;
de6ed584 1083
15ec875c 1084 /* Remove intermediate now unused copy and conversion chains. */
75a70cf9 1085 use_rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 1086 if (result
75a70cf9 1087 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
7b705d94 1088 && TREE_CODE (use_rhs) == SSA_NAME
1089 && has_zero_uses (gimple_assign_lhs (use_stmt)))
15ec875c 1090 {
75a70cf9 1091 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
15ec875c 1092 release_defs (use_stmt);
75a70cf9 1093 gsi_remove (&gsi, true);
15ec875c 1094 }
3d5cfe81 1095 }
1096
628ce22b 1097 return all && has_zero_uses (name);
3d5cfe81 1098}
1099
678b2f5b 1100
e3a19533 1101/* Forward propagate the comparison defined in *DEFGSI like
678b2f5b 1102 cond_1 = x CMP y to uses of the form
1103 a_1 = (T')cond_1
1104 a_1 = !cond_1
1105 a_1 = cond_1 != 0
e3a19533 1106 Returns true if stmt is now unused. Advance DEFGSI to the next
1107 statement. */
678b2f5b 1108
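/* As an illustrative sketch for integer operands (assuming cond_1 has no
   other uses):

     cond_1 = x_2 < y_3;
     a_4 = cond_1 ^ 1;

   is rewritten into

     a_4 = x_2 >= y_3;

   using the inverted comparison code.  */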
1109static bool
e3a19533 1110forward_propagate_comparison (gimple_stmt_iterator *defgsi)
678b2f5b 1111{
e3a19533 1112 gimple stmt = gsi_stmt (*defgsi);
678b2f5b 1113 tree name = gimple_assign_lhs (stmt);
1114 gimple use_stmt;
1115 tree tmp = NULL_TREE;
e5b1e080 1116 gimple_stmt_iterator gsi;
1117 enum tree_code code;
1118 tree lhs;
678b2f5b 1119
1120 /* Don't propagate ssa names that occur in abnormal phis. */
1121 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1122 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1123 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1124 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
e3a19533 1125 goto bailout;
678b2f5b 1126
1127 /* Do not un-cse comparisons. But propagate through copies. */
1128 use_stmt = get_prop_dest_stmt (name, &name);
e5b1e080 1129 if (!use_stmt
1130 || !is_gimple_assign (use_stmt))
e3a19533 1131 goto bailout;
678b2f5b 1132
e5b1e080 1133 code = gimple_assign_rhs_code (use_stmt);
1134 lhs = gimple_assign_lhs (use_stmt);
1135 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
e3a19533 1136 goto bailout;
678b2f5b 1137
e5b1e080 1138 /* We can propagate the condition into a statement that
1139 computes the logical negation of the comparison result. */
4b5f1658 1140 if ((code == BIT_NOT_EXPR
1141 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1142 || (code == BIT_XOR_EXPR
1143 && integer_onep (gimple_assign_rhs2 (use_stmt))))
e5b1e080 1144 {
1145 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1146 bool nans = HONOR_NANS (TYPE_MODE (type));
1147 enum tree_code inv_code;
1148 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1149 if (inv_code == ERROR_MARK)
e3a19533 1150 goto bailout;
678b2f5b 1151
e5b1e080 1152 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1153 gimple_assign_rhs2 (stmt));
1154 }
1155 else
e3a19533 1156 goto bailout;
678b2f5b 1157
e5b1e080 1158 gsi = gsi_for_stmt (use_stmt);
1159 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1160 use_stmt = gsi_stmt (gsi);
1161 update_stmt (use_stmt);
678b2f5b 1162
e5b1e080 1163 if (dump_file && (dump_flags & TDF_DETAILS))
1164 {
1165 fprintf (dump_file, " Replaced '");
1166 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1167 fprintf (dump_file, "' with '");
1168 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1169 fprintf (dump_file, "'\n");
678b2f5b 1170 }
1171
e3a19533 1172 /* When we remove the stmt now, the iterator defgsi goes off its current
1173 sequence, hence advance it now. */
1174 gsi_next (defgsi);
1175
e5b1e080 1176 /* Remove defining statements. */
1177 return remove_prop_source_from_use (name);
e3a19533 1178
1179bailout:
1180 gsi_next (defgsi);
1181 return false;
678b2f5b 1182}
1183
1184
d23e1965 1185/* GSI_P points to a statement which performs a narrowing integral
1186 conversion.
1187
1188 Look for cases like:
1189
1190 t = x & c;
1191 y = (T) t;
1192
1193 Turn them into:
1194
1195 t = x & c;
1196 y = (T) x;
1197
1198 If T is narrower than X's type and C merely masks off bits outside
1199 of (T) and nothing else.
1200
1201 Normally we'd let DCE remove the dead statement. But no DCE runs
1202 after the last forwprop/combine pass, so we remove the obviously
1203 dead code ourselves.
1204
1205 Return TRUE if a change was made, FALSE otherwise. */
1206
1207static bool
1208simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
1209{
1210 gimple stmt = gsi_stmt (*gsi_p);
1211 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
1212
1213 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1214 the only use of the BIT_AND_EXPR result is the conversion. */
1215 if (is_gimple_assign (rhs_def_stmt)
1216 && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
1217 && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
1218 {
1219 tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
1220 tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
1221 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1222
1223 /* Now verify suitability of the BIT_AND_EXPR's operands.
1224 The first must be an SSA_NAME that we can propagate and the
1225 second must be an integer constant that masks out all the
1226 bits outside the final result's type, but nothing else. */
1227 if (TREE_CODE (rhs_def_operand1) == SSA_NAME
1228 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
1229 && TREE_CODE (rhs_def_operand2) == INTEGER_CST
1230 && operand_equal_p (rhs_def_operand2,
1231 build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
1232 TYPE_PRECISION (lhs_type)),
1233 0))
1234 {
1235 /* This is an optimizable case. Replace the source operand
1236 in the conversion with the first source operand of the
1237 BIT_AND_EXPR. */
1238 gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
1239 stmt = gsi_stmt (*gsi_p);
1240 update_stmt (stmt);
1241
1242 /* There is no DCE after the last forwprop pass. It's
1243 easy to clean up the first order effects here. */
1244 gimple_stmt_iterator si;
1245 si = gsi_for_stmt (rhs_def_stmt);
1246 gsi_remove (&si, true);
1247 release_defs (rhs_def_stmt);
1248 return true;
1249 }
1250 }
1251
1252 return false;
1253}
1254
1255
3a938499 1256/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1257 If so, we can change STMT into lhs = y which can later be copy
48e1416a 1258 propagated. Similarly for negation.
3a938499 1259
48e1416a 1260 This could trivially be formulated as a forward propagation
3a938499 1261 to immediate uses. However, we already had an implementation
1262 from DOM which used backward propagation via the use-def links.
1263
1264 It turns out that backward propagation is actually faster as
1265 there's less work to do for each NOT/NEG expression we find.
1266 Backwards propagation needs to look at the statement in a single
1267 backlink. Forward propagation needs to look at potentially more
678b2f5b 1268 than one forward link.
3a938499 1269
678b2f5b 1270 Returns true when the statement was changed. */
1271
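/* As an illustrative sketch:

     x_2 = ~y_1;
     lhs_3 = ~x_2;

   is rewritten into

     lhs_3 = y_1;

   and similarly for a pair of negations.  */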
1272static bool
75a70cf9 1273simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
3a938499 1274{
75a70cf9 1275 gimple stmt = gsi_stmt (*gsi_p);
1276 tree rhs = gimple_assign_rhs1 (stmt);
1277 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
3a938499 1278
1279 /* See if the RHS_DEF_STMT has the same form as our statement. */
75a70cf9 1280 if (is_gimple_assign (rhs_def_stmt)
1281 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
3a938499 1282 {
75a70cf9 1283 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
3a938499 1284
1285 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1286 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1287 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1288 {
75a70cf9 1289 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1290 stmt = gsi_stmt (*gsi_p);
3a938499 1291 update_stmt (stmt);
678b2f5b 1292 return true;
3a938499 1293 }
1294 }
678b2f5b 1295
1296 return false;
3a938499 1297}
3d5cfe81 1298
b59e1c90 1299/* Helper function for simplify_gimple_switch. Remove case labels that
1300 have values outside the range of the new type. */
1301
1302static void
1303simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1304{
1305 unsigned int branch_num = gimple_switch_num_labels (stmt);
c2078b80 1306 auto_vec<tree> labels (branch_num);
b59e1c90 1307 unsigned int i, len;
1308
1309 /* Collect the existing case labels in a VEC, and preprocess it as if
1310 we are gimplifying a GENERIC SWITCH_EXPR. */
1311 for (i = 1; i < branch_num; i++)
f1f41a6c 1312 labels.quick_push (gimple_switch_label (stmt, i));
b59e1c90 1313 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1314
1315 /* If any labels were removed, replace the existing case labels
1316 in the GIMPLE_SWITCH statement with the correct ones.
1317 Note that the type updates were done in-place on the case labels,
1318 so we only have to replace the case labels in the GIMPLE_SWITCH
1319 if the number of labels changed. */
f1f41a6c 1320 len = labels.length ();
b59e1c90 1321 if (len < branch_num - 1)
1322 {
1323 bitmap target_blocks;
1324 edge_iterator ei;
1325 edge e;
1326
1327 /* Corner case: *all* case labels have been removed as being
1328 out-of-range for INDEX_TYPE. Push one label and let the
1329 CFG cleanups deal with this further. */
1330 if (len == 0)
1331 {
1332 tree label, elt;
1333
1334 label = CASE_LABEL (gimple_switch_default_label (stmt));
1335 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
f1f41a6c 1336 labels.quick_push (elt);
b59e1c90 1337 len = 1;
1338 }
1339
f1f41a6c 1340 for (i = 0; i < labels.length (); i++)
1341 gimple_switch_set_label (stmt, i + 1, labels[i]);
b59e1c90 1342 for (i++ ; i < branch_num; i++)
1343 gimple_switch_set_label (stmt, i, NULL_TREE);
1344 gimple_switch_set_num_labels (stmt, len + 1);
1345
1346 /* Cleanup any edges that are now dead. */
1347 target_blocks = BITMAP_ALLOC (NULL);
1348 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1349 {
1350 tree elt = gimple_switch_label (stmt, i);
1351 basic_block target = label_to_block (CASE_LABEL (elt));
1352 bitmap_set_bit (target_blocks, target->index);
1353 }
1354 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1355 {
1356 if (! bitmap_bit_p (target_blocks, e->dest->index))
1357 {
1358 remove_edge (e);
1359 cfg_changed = true;
1360 free_dominance_info (CDI_DOMINATORS);
1361 }
1362 else
1363 ei_next (&ei);
1364 }
1365 BITMAP_FREE (target_blocks);
1366 }
b59e1c90 1367}
1368
b5860aba 1369/* STMT is a GIMPLE_SWITCH for which we attempt to find equivalent forms
 1370 of the condition that we may be able to optimize better. */
1371
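/* As an illustrative sketch (assuming all case values fit in the type
   of c_1):

     i_2 = (int) c_1;
     switch (i_2) <...>

   is changed to switch on c_1 directly, after which
   simplify_gimple_switch_label_vec re-canonicalizes the case labels for
   the narrower index type.  */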
678b2f5b 1372static bool
75a70cf9 1373simplify_gimple_switch (gimple stmt)
b5860aba 1374{
b5860aba 1375 /* The optimization that we really care about is removing unnecessary
1376 casts. That will let us do much better in propagating the inferred
1377 constant at the switch target. */
00bffa46 1378 tree cond = gimple_switch_index (stmt);
b5860aba 1379 if (TREE_CODE (cond) == SSA_NAME)
1380 {
00bffa46 1381 gimple def_stmt = SSA_NAME_DEF_STMT (cond);
1382 if (gimple_assign_cast_p (def_stmt))
b5860aba 1383 {
00bffa46 1384 tree def = gimple_assign_rhs1 (def_stmt);
1385 if (TREE_CODE (def) != SSA_NAME)
1386 return false;
1387
1388 /* If we have an extension or sign-change that preserves the
1389 values we check against then we can copy the source value into
1390 the switch. */
1391 tree ti = TREE_TYPE (def);
1392 if (INTEGRAL_TYPE_P (ti)
1393 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
b5860aba 1394 {
00bffa46 1395 size_t n = gimple_switch_num_labels (stmt);
1396 tree min = NULL_TREE, max = NULL_TREE;
1397 if (n > 1)
1398 {
1399 min = CASE_LOW (gimple_switch_label (stmt, 1));
1400 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1401 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1402 else
1403 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1404 }
1405 if ((!min || int_fits_type_p (min, ti))
1406 && (!max || int_fits_type_p (max, ti)))
b5860aba 1407 {
75a70cf9 1408 gimple_switch_set_index (stmt, def);
b59e1c90 1409 simplify_gimple_switch_label_vec (stmt, ti);
b5860aba 1410 update_stmt (stmt);
678b2f5b 1411 return true;
b5860aba 1412 }
1413 }
1414 }
1415 }
678b2f5b 1416
1417 return false;
b5860aba 1418}
1419
27f931ff 1420/* For pointers p2 and p1 return p2 - p1 if the
1421 difference is known and constant, otherwise return NULL. */
1422
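/* As an illustrative sketch (with hypothetical names):

     p1_1 = &buf[0];
     p2_2 = p1_1 + 16;

   constant_pointer_difference (p1_1, p2_2) returns 16, while NULL_TREE
   is returned whenever the byte offset between the two pointers cannot
   be determined at compile time.  */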
1423static tree
1424constant_pointer_difference (tree p1, tree p2)
1425{
1426 int i, j;
1427#define CPD_ITERATIONS 5
1428 tree exps[2][CPD_ITERATIONS];
1429 tree offs[2][CPD_ITERATIONS];
1430 int cnt[2];
1431
1432 for (i = 0; i < 2; i++)
1433 {
1434 tree p = i ? p1 : p2;
1435 tree off = size_zero_node;
1436 gimple stmt;
1437 enum tree_code code;
1438
1439 /* For each of p1 and p2 we need to iterate at least
1440 twice, to handle ADDR_EXPR directly in p1/p2,
1441 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1442 on definition's stmt RHS. Iterate a few extra times. */
1443 j = 0;
1444 do
1445 {
1446 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1447 break;
1448 if (TREE_CODE (p) == ADDR_EXPR)
1449 {
1450 tree q = TREE_OPERAND (p, 0);
1451 HOST_WIDE_INT offset;
1452 tree base = get_addr_base_and_unit_offset (q, &offset);
1453 if (base)
1454 {
1455 q = base;
1456 if (offset)
1457 off = size_binop (PLUS_EXPR, off, size_int (offset));
1458 }
1459 if (TREE_CODE (q) == MEM_REF
1460 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1461 {
1462 p = TREE_OPERAND (q, 0);
1463 off = size_binop (PLUS_EXPR, off,
e913b5cd 1464 wide_int_to_tree (sizetype,
1465 mem_ref_offset (q)));
27f931ff 1466 }
1467 else
1468 {
1469 exps[i][j] = q;
1470 offs[i][j++] = off;
1471 break;
1472 }
1473 }
1474 if (TREE_CODE (p) != SSA_NAME)
1475 break;
1476 exps[i][j] = p;
1477 offs[i][j++] = off;
1478 if (j == CPD_ITERATIONS)
1479 break;
1480 stmt = SSA_NAME_DEF_STMT (p);
1481 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1482 break;
1483 code = gimple_assign_rhs_code (stmt);
1484 if (code == POINTER_PLUS_EXPR)
1485 {
1486 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1487 break;
1488 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1489 p = gimple_assign_rhs1 (stmt);
1490 }
d09ef31a 1491 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
27f931ff 1492 p = gimple_assign_rhs1 (stmt);
1493 else
1494 break;
1495 }
1496 while (1);
1497 cnt[i] = j;
1498 }
1499
1500 for (i = 0; i < cnt[0]; i++)
1501 for (j = 0; j < cnt[1]; j++)
1502 if (exps[0][i] == exps[1][j])
1503 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1504
1505 return NULL_TREE;
1506}
1507
1508/* *GSI_P is a GIMPLE_CALL to a builtin function.
1509 Optimize
1510 memcpy (p, "abcd", 4);
1511 memset (p + 4, ' ', 3);
1512 into
1513 memcpy (p, "abcd ", 7);
1514 call if the latter can be stored by pieces during expansion. */
1515
1516static bool
1517simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1518{
1519 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1520 tree vuse = gimple_vuse (stmt2);
1521 if (vuse == NULL)
1522 return false;
1523 stmt1 = SSA_NAME_DEF_STMT (vuse);
1524
1525 switch (DECL_FUNCTION_CODE (callee2))
1526 {
1527 case BUILT_IN_MEMSET:
1528 if (gimple_call_num_args (stmt2) != 3
1529 || gimple_call_lhs (stmt2)
1530 || CHAR_BIT != 8
1531 || BITS_PER_UNIT != 8)
1532 break;
1533 else
1534 {
1535 tree callee1;
1536 tree ptr1, src1, str1, off1, len1, lhs1;
1537 tree ptr2 = gimple_call_arg (stmt2, 0);
1538 tree val2 = gimple_call_arg (stmt2, 1);
1539 tree len2 = gimple_call_arg (stmt2, 2);
1540 tree diff, vdef, new_str_cst;
1541 gimple use_stmt;
1542 unsigned int ptr1_align;
1543 unsigned HOST_WIDE_INT src_len;
1544 char *src_buf;
1545 use_operand_p use_p;
1546
e913b5cd 1547 if (!tree_fits_shwi_p (val2)
1548 || !tree_fits_uhwi_p (len2))
27f931ff 1549 break;
1550 if (is_gimple_call (stmt1))
1551 {
1552 /* If first stmt is a call, it needs to be memcpy
1553 or mempcpy, with string literal as second argument and
1554 constant length. */
1555 callee1 = gimple_call_fndecl (stmt1);
1556 if (callee1 == NULL_TREE
1557 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1558 || gimple_call_num_args (stmt1) != 3)
1559 break;
1560 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1561 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1562 break;
1563 ptr1 = gimple_call_arg (stmt1, 0);
1564 src1 = gimple_call_arg (stmt1, 1);
1565 len1 = gimple_call_arg (stmt1, 2);
1566 lhs1 = gimple_call_lhs (stmt1);
e913b5cd 1567 if (!tree_fits_uhwi_p (len1))
27f931ff 1568 break;
1569 str1 = string_constant (src1, &off1);
1570 if (str1 == NULL_TREE)
1571 break;
e913b5cd 1572 if (!tree_fits_uhwi_p (off1)
27f931ff 1573 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1574 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
e913b5cd 1575 - tree_to_uhwi (off1)) > 0
27f931ff 1576 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1577 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1578 != TYPE_MODE (char_type_node))
1579 break;
1580 }
1581 else if (gimple_assign_single_p (stmt1))
1582 {
1583 /* Otherwise look for length 1 memcpy optimized into
1584 assignment. */
1585 ptr1 = gimple_assign_lhs (stmt1);
1586 src1 = gimple_assign_rhs1 (stmt1);
1587 if (TREE_CODE (ptr1) != MEM_REF
1588 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
e913b5cd 1589 || !tree_fits_shwi_p (src1))
27f931ff 1590 break;
1591 ptr1 = build_fold_addr_expr (ptr1);
1592 callee1 = NULL_TREE;
1593 len1 = size_one_node;
1594 lhs1 = NULL_TREE;
1595 off1 = size_zero_node;
1596 str1 = NULL_TREE;
1597 }
1598 else
1599 break;
1600
1601 diff = constant_pointer_difference (ptr1, ptr2);
1602 if (diff == NULL && lhs1 != NULL)
1603 {
1604 diff = constant_pointer_difference (lhs1, ptr2);
1605 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1606 && diff != NULL)
1607 diff = size_binop (PLUS_EXPR, diff,
1608 fold_convert (sizetype, len1));
1609 }
1610 /* If the difference between the second and first destination pointer
1611 is not constant, or is bigger than the memcpy length, bail out. */
1612 if (diff == NULL
e913b5cd 1613 || !tree_fits_uhwi_p (diff)
27f931ff 1614 || tree_int_cst_lt (len1, diff))
1615 break;
1616
1617 /* Use maximum of difference plus memset length and memcpy length
1618 as the new memcpy length; if it is too big, bail out. */
e913b5cd 1619 src_len = tree_to_uhwi (diff);
1620 src_len += tree_to_uhwi (len2);
aa59f000 1621 if (src_len < tree_to_uhwi (len1))
e913b5cd 1622 src_len = tree_to_uhwi (len1);
27f931ff 1623 if (src_len > 1024)
1624 break;
1625
1626 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1627 with a bigger length would return a different result. */
1628 if (lhs1 != NULL_TREE
1629 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1630 && (TREE_CODE (lhs1) != SSA_NAME
1631 || !single_imm_use (lhs1, &use_p, &use_stmt)
1632 || use_stmt != stmt2))
1633 break;
1634
1635 /* If anything reads memory in between memcpy and memset
1636 call, the modified memcpy call might change it. */
1637 vdef = gimple_vdef (stmt1);
1638 if (vdef != NULL
1639 && (!single_imm_use (vdef, &use_p, &use_stmt)
1640 || use_stmt != stmt2))
1641 break;
1642
957d0361 1643 ptr1_align = get_pointer_alignment (ptr1);
27f931ff 1644 /* Construct the new source string literal. */
1645 src_buf = XALLOCAVEC (char, src_len + 1);
1646 if (callee1)
1647 memcpy (src_buf,
e913b5cd 1648 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1649 tree_to_uhwi (len1));
27f931ff 1650 else
e913b5cd 1651 src_buf[0] = tree_to_shwi (src1);
1652 memset (src_buf + tree_to_uhwi (diff),
1653 tree_to_shwi (val2), tree_to_uhwi (len2));
27f931ff 1654 src_buf[src_len] = '\0';
1655 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1656 handles embedded '\0's. */
1657 if (strlen (src_buf) != src_len)
1658 break;
1659 rtl_profile_for_bb (gimple_bb (stmt2));
1660 /* If the new memcpy wouldn't be emitted by storing the literal
1661 by pieces, this optimization might enlarge .rodata too much,
1662 as commonly used string literals couldn't be shared any
1663 longer. */
1664 if (!can_store_by_pieces (src_len,
1665 builtin_strncpy_read_str,
1666 src_buf, ptr1_align, false))
1667 break;
1668
1669 new_str_cst = build_string_literal (src_len, src_buf);
1670 if (callee1)
1671 {
1672 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1673 memset call. */
1674 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1675 gimple_call_set_lhs (stmt1, NULL_TREE);
1676 gimple_call_set_arg (stmt1, 1, new_str_cst);
1677 gimple_call_set_arg (stmt1, 2,
1678 build_int_cst (TREE_TYPE (len1), src_len));
1679 update_stmt (stmt1);
1680 unlink_stmt_vdef (stmt2);
1681 gsi_remove (gsi_p, true);
1682 release_defs (stmt2);
1683 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1684 release_ssa_name (lhs1);
1685 return true;
1686 }
1687 else
1688 {
1689 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1690 assignment, remove STMT1 and change memset call into
1691 memcpy call. */
1692 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1693
7ecb2e7c 1694 if (!is_gimple_val (ptr1))
1695 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1696 true, GSI_SAME_STMT);
b9a16870 1697 gimple_call_set_fndecl (stmt2,
1698 builtin_decl_explicit (BUILT_IN_MEMCPY));
27f931ff 1699 gimple_call_set_arg (stmt2, 0, ptr1);
1700 gimple_call_set_arg (stmt2, 1, new_str_cst);
1701 gimple_call_set_arg (stmt2, 2,
1702 build_int_cst (TREE_TYPE (len2), src_len));
1703 unlink_stmt_vdef (stmt1);
1704 gsi_remove (&gsi, true);
1705 release_defs (stmt1);
1706 update_stmt (stmt2);
1707 return false;
1708 }
1709 }
1710 break;
1711 default:
1712 break;
1713 }
1714 return false;
1715}
1716
41913fa9 1717/* Checks if expression has type of one-bit precision, or is a known
1718 truth-valued expression. */
1719static bool
1720truth_valued_ssa_name (tree name)
1721{
1722 gimple def;
1723 tree type = TREE_TYPE (name);
1724
1725 if (!INTEGRAL_TYPE_P (type))
1726 return false;
1727 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1728 necessarily one and so ~X is not equal to !X. */
1729 if (TYPE_PRECISION (type) == 1)
1730 return true;
1731 def = SSA_NAME_DEF_STMT (name);
1732 if (is_gimple_assign (def))
1733 return truth_value_p (gimple_assign_rhs_code (def));
1734 return false;
1735}
1736
1737/* Helper routine for simplify_bitwise_binary_1 function.
1738 For the SSA name NAME, return the expression X if it meets the condition
1739 NAME = !X. Otherwise return NULL_TREE.
1740 Detected patterns for NAME = !X are:
1741 !X and X == 0 for X with integral type.
1742 X ^ 1, X != 1, or ~X for X with integral type of precision one. */
1743static tree
1744lookup_logical_inverted_value (tree name)
1745{
1746 tree op1, op2;
1747 enum tree_code code;
1748 gimple def;
1749
1750 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1751 return. */
1752 if (TREE_CODE (name) != SSA_NAME
1753 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1754 return NULL_TREE;
1755 def = SSA_NAME_DEF_STMT (name);
1756 if (!is_gimple_assign (def))
1757 return NULL_TREE;
1758
1759 code = gimple_assign_rhs_code (def);
1760 op1 = gimple_assign_rhs1 (def);
1761 op2 = NULL_TREE;
1762
1763 /* For an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR operation, fetch the second
8f4a7578 1764 operand; BIT_NOT_EXPR needs no second operand, other codes are not handled. */
41913fa9 1765 if (code == EQ_EXPR || code == NE_EXPR
1766 || code == BIT_XOR_EXPR)
1767 op2 = gimple_assign_rhs2 (def);
1768
1769 switch (code)
1770 {
41913fa9 1771 case BIT_NOT_EXPR:
1772 if (truth_valued_ssa_name (name))
1773 return op1;
1774 break;
1775 case EQ_EXPR:
1776 /* Check if we have X == 0 and X has an integral type. */
1777 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1778 break;
1779 if (integer_zerop (op2))
1780 return op1;
1781 break;
1782 case NE_EXPR:
1783 /* Check if we have X != 1 and X is truth-valued. */
1784 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1785 break;
1786 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1787 return op1;
1788 break;
1789 case BIT_XOR_EXPR:
1790 /* Check if we have X ^ 1 and X is truth valued. */
1791 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1792 return op1;
1793 break;
1794 default:
1795 break;
1796 }
1797
1798 return NULL_TREE;
1799}
1800
1801/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1802 operations CODE, if one operand has the logically inverted
1803 value of the other. */
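/* For example (hypothetical GIMPLE, all names of the same 1-bit type):
     n_2 = x_1 == 0;
     t_3 = x_1 & n_2;    is folded to    t_3 = 0;
     t_4 = x_1 | n_2;    is folded to    t_4 = 1;  */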
1804static tree
1805simplify_bitwise_binary_1 (enum tree_code code, tree type,
1806 tree arg1, tree arg2)
1807{
1808 tree anot;
1809
1810 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1811 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1812 && code != BIT_XOR_EXPR)
1813 return NULL_TREE;
1814
1815 /* First check if operands ARG1 and ARG2 are equal. If so
1816 return NULL_TREE as this optimization is handled by fold_stmt. */
1817 if (arg1 == arg2)
1818 return NULL_TREE;
1819 /* See if we have in arguments logical-not patterns. */
1820 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1821 || anot != arg2)
1822 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1823 || anot != arg1))
1824 return NULL_TREE;
1825
1826 /* X & !X -> 0. */
1827 if (code == BIT_AND_EXPR)
1828 return fold_convert (type, integer_zero_node);
1829 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1830 if (truth_valued_ssa_name (anot))
1831 return fold_convert (type, integer_one_node);
1832
1833 /* ??? Otherwise the result is (X != 0 ? X : 1); not handled. */
1834 return NULL_TREE;
1835}
1836
10fbe63d 1837/* Given a ssa_name in NAME see if it was defined by an assignment and
1838 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1839 to the second operand on the rhs. */
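/* For instance (hypothetical), if NAME is defined by
     name_1 = a_2 & b_3;
   then  defcodefor_name (name_1, &code, &arg1, &arg2)  sets
   code == BIT_AND_EXPR, arg1 == a_2 and arg2 == b_3; for a name without a
   usable defining assignment, code is simply TREE_CODE (name) and arg1 is
   NAME itself.  */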
1840
1841static inline void
1842defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1843{
1844 gimple def;
1845 enum tree_code code1;
1846 tree arg11;
1847 tree arg21;
1848 tree arg31;
1849 enum gimple_rhs_class grhs_class;
1850
1851 code1 = TREE_CODE (name);
1852 arg11 = name;
1853 arg21 = NULL_TREE;
1854 grhs_class = get_gimple_rhs_class (code1);
1855
1856 if (code1 == SSA_NAME)
1857 {
1858 def = SSA_NAME_DEF_STMT (name);
1859
1860 if (def && is_gimple_assign (def)
1861 && can_propagate_from (def))
1862 {
1863 code1 = gimple_assign_rhs_code (def);
1864 arg11 = gimple_assign_rhs1 (def);
1865 arg21 = gimple_assign_rhs2 (def);
1866 arg31 = gimple_assign_rhs3 (def);
1867 }
1868 }
1869 else if (grhs_class == GIMPLE_TERNARY_RHS
1870 || grhs_class == GIMPLE_BINARY_RHS
1871 || grhs_class == GIMPLE_UNARY_RHS
1872 || grhs_class == GIMPLE_SINGLE_RHS)
1873 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1874
1875 *code = code1;
1876 *arg1 = arg11;
1877 if (arg2)
1878 *arg2 = arg21;
1879 /* Ignore arg3 currently. */
1880}
1881
750e47f5 1882/* Return true if a conversion of an operand from type FROM to type TO
1883 should be applied after performing the operation instead. */
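/* For example (hypothetical), in
     unsigned char c = ...;
     int r = (int) c & 0x0f;
   the widening conversion is hoisted past the AND, so the masking is done
   in the narrower unsigned char type and only the result is widened.  */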
1884
1885static bool
1886hoist_conversion_for_bitop_p (tree to, tree from)
1887{
1888 /* That's a good idea if the conversion widens the operand, thus
1889 after hoisting the conversion the operation will be narrower. */
1890 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1891 return true;
1892
1893 /* It's also a good idea if the conversion is to a non-integer mode. */
1894 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1895 return true;
1896
1897 /* Or if the precision of TO is not the same as the precision
1898 of its mode. */
1899 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1900 return true;
1901
1902 return false;
1903}
1904
16bc66ec 1905/* GSI points to a statement of the form
1906
1907 result = OP0 CODE OP1
1908
1909 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1910 BIT_AND_EXPR or BIT_IOR_EXPR.
1911
1912 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1913 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1914 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1915
040f64e5 1916 If a simplification is made, return TRUE, else return FALSE. */
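/* For example (hypothetical), with single-bit unsigned X and Y whose result
   feeds a comparison:
     t_1 = ~X_2;
     r_3 = t_1 & Y_4;     becomes     r_3 = X_2 < Y_4;
   since for 0/1 values  ~X & Y  is 1 exactly when X == 0 and Y == 1.  */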
16bc66ec 1917static bool
1918simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1919 enum tree_code code,
1920 tree op0, tree op1)
1921{
1922 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1923
1924 if (!is_gimple_assign (op0_def_stmt)
1925 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1926 return false;
1927
1928 tree x = gimple_assign_rhs1 (op0_def_stmt);
1929 if (TREE_CODE (x) == SSA_NAME
1930 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1931 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1932 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1933 {
1934 enum tree_code newcode;
1935
1936 gimple stmt = gsi_stmt (*gsi);
1937 gimple_assign_set_rhs1 (stmt, x);
1938 gimple_assign_set_rhs2 (stmt, op1);
1939 if (code == BIT_AND_EXPR)
1940 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1941 else
1942 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1943 gimple_assign_set_rhs_code (stmt, newcode);
1944 update_stmt (stmt);
1945 return true;
1946 }
1947 return false;
1948
1949}
1950
300da094 1951/* Simplify bitwise binary operations.
1952 Return true if a transformation applied, otherwise return false. */
1c4607fd 1953
300da094 1954static bool
1955simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1c4607fd 1956{
300da094 1957 gimple stmt = gsi_stmt (*gsi);
1c4607fd 1958 tree arg1 = gimple_assign_rhs1 (stmt);
1959 tree arg2 = gimple_assign_rhs2 (stmt);
300da094 1960 enum tree_code code = gimple_assign_rhs_code (stmt);
1961 tree res;
10fbe63d 1962 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
26f54bd0 1963 enum tree_code def1_code, def2_code;
1c4607fd 1964
10fbe63d 1965 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1966 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
26f54bd0 1967
750e47f5 1968 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1969 when profitable. */
25ce0d90 1970 if (TREE_CODE (arg2) == INTEGER_CST
1971 && CONVERT_EXPR_CODE_P (def1_code)
750e47f5 1972 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
105fc895 1973 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
25ce0d90 1974 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1975 {
1976 gimple newop;
03d37e4e 1977 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
25ce0d90 1978 newop =
1979 gimple_build_assign_with_ops (code, tem, def1_arg1,
1980 fold_convert_loc (gimple_location (stmt),
1981 TREE_TYPE (def1_arg1),
1982 arg2));
4b5f1658 1983 gimple_set_location (newop, gimple_location (stmt));
25ce0d90 1984 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1985 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1986 tem, NULL_TREE, NULL_TREE);
1987 update_stmt (gsi_stmt (*gsi));
1988 return true;
1989 }
1990
300da094 1991 /* For bitwise binary operations apply operand conversions to the
1992 binary operation result instead of to the operands. This allows
1993 to combine successive conversions and bitwise binary operations. */
26f54bd0 1994 if (CONVERT_EXPR_CODE_P (def1_code)
1995 && CONVERT_EXPR_CODE_P (def2_code)
1996 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
750e47f5 1997 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1c4607fd 1998 {
26f54bd0 1999 gimple newop;
03d37e4e 2000 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
26f54bd0 2001 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
4b5f1658 2002 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 2003 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
2004 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
2005 tem, NULL_TREE, NULL_TREE);
2006 update_stmt (gsi_stmt (*gsi));
2007 return true;
2008 }
2009
35967c0f 2010
2011 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
2012 if (def1_code == def2_code
2013 && def1_code == BIT_AND_EXPR
0a3f7203 2014 && operand_equal_for_phi_arg_p (def1_arg2,
2015 def2_arg2))
35967c0f 2016 {
0a3f7203 2017 tree b = def1_arg2;
35967c0f 2018 tree a = def1_arg1;
2019 tree c = def2_arg1;
2020 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2021 /* If A OP0 C folds to the constant 0 (this usually means C is the
2022 same as A), the whole expression is 0, so use it directly. */
2023 if (integer_zerop (inner))
2024 {
2025 gimple_assign_set_rhs_from_tree (gsi, inner);
2026 update_stmt (stmt);
2027 return true;
2028 }
2029 /* If A OP0 C folds to an SSA_NAME (this usually means C is the same
2030 as A), fold the remaining (A OP0 C) & B directly as well. */
2031 else if (TREE_CODE (inner) == SSA_NAME)
2032 {
2033 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2034 inner, b);
2035 gimple_assign_set_rhs_from_tree (gsi, outer);
2036 update_stmt (stmt);
2037 return true;
2038 }
2039 else
2040 {
2041 gimple newop;
2042 tree tem;
03d37e4e 2043 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
35967c0f 2044 newop = gimple_build_assign_with_ops (code, tem, a, c);
35967c0f 2045 gimple_set_location (newop, gimple_location (stmt));
2046 /* Make sure to re-process the new stmt as it's walking upwards. */
2047 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2048 gimple_assign_set_rhs1 (stmt, tem);
2049 gimple_assign_set_rhs2 (stmt, b);
2050 gimple_assign_set_rhs_code (stmt, def1_code);
2051 update_stmt (stmt);
2052 return true;
2053 }
2054 }
2055
26f54bd0 2056 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2057 if (code == BIT_AND_EXPR
2058 && def1_code == BIT_IOR_EXPR
2c83a45e 2059 && CONSTANT_CLASS_P (arg2)
2060 && CONSTANT_CLASS_P (def1_arg2))
26f54bd0 2061 {
2062 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
10fbe63d 2063 arg2, def1_arg2);
26f54bd0 2064 tree tem;
2065 gimple newop;
2066 if (integer_zerop (cst))
300da094 2067 {
26f54bd0 2068 gimple_assign_set_rhs1 (stmt, def1_arg1);
2069 update_stmt (stmt);
2070 return true;
300da094 2071 }
03d37e4e 2072 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
26f54bd0 2073 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2074 tem, def1_arg1, arg2);
4b5f1658 2075 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 2076 /* Make sure to re-process the new stmt as it's walking upwards. */
2077 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2078 gimple_assign_set_rhs1 (stmt, tem);
2079 gimple_assign_set_rhs2 (stmt, cst);
2080 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2081 update_stmt (stmt);
2082 return true;
2083 }
2084
2085 /* Combine successive equal operations with constants. */
2086 if ((code == BIT_AND_EXPR
2087 || code == BIT_IOR_EXPR
2088 || code == BIT_XOR_EXPR)
2089 && def1_code == code
2c83a45e 2090 && CONSTANT_CLASS_P (arg2)
2091 && CONSTANT_CLASS_P (def1_arg2))
26f54bd0 2092 {
2093 tree cst = fold_build2 (code, TREE_TYPE (arg2),
10fbe63d 2094 arg2, def1_arg2);
26f54bd0 2095 gimple_assign_set_rhs1 (stmt, def1_arg1);
2096 gimple_assign_set_rhs2 (stmt, cst);
2097 update_stmt (stmt);
2098 return true;
1c4607fd 2099 }
300da094 2100
41913fa9 2101 /* Try simple folding for X op !X, and X op X. */
2102 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2103 if (res != NULL_TREE)
2104 {
2105 gimple_assign_set_rhs_from_tree (gsi, res);
2106 update_stmt (gsi_stmt (*gsi));
2107 return true;
2108 }
2109
10fbe63d 2110 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2111 {
2112 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2113 if (def1_code == ocode)
2114 {
2115 tree x = arg2;
2116 enum tree_code coden;
2117 tree a1, a2;
2118 /* ( X | Y) & X -> X */
2119 /* ( X & Y) | X -> X */
2120 if (x == def1_arg1
2121 || x == def1_arg2)
2122 {
2123 gimple_assign_set_rhs_from_tree (gsi, x);
2124 update_stmt (gsi_stmt (*gsi));
2125 return true;
2126 }
2127
2128 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2129 /* (~X | Y) & X -> X & Y */
2130 /* (~X & Y) | X -> X | Y */
2131 if (coden == BIT_NOT_EXPR && a1 == x)
2132 {
2133 gimple_assign_set_rhs_with_ops (gsi, code,
2134 x, def1_arg2);
2135 gcc_assert (gsi_stmt (*gsi) == stmt);
2136 update_stmt (stmt);
2137 return true;
2138 }
2139 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2140 /* (Y | ~X) & X -> X & Y */
2141 /* (Y & ~X) | X -> X | Y */
2142 if (coden == BIT_NOT_EXPR && a1 == x)
2143 {
2144 gimple_assign_set_rhs_with_ops (gsi, code,
2145 x, def1_arg1);
2146 gcc_assert (gsi_stmt (*gsi) == stmt);
2147 update_stmt (stmt);
2148 return true;
2149 }
2150 }
2151 if (def2_code == ocode)
2152 {
2153 enum tree_code coden;
2154 tree a1;
2155 tree x = arg1;
2156 /* X & ( X | Y) -> X */
2157 /* X | ( X & Y) -> X */
2158 if (x == def2_arg1
2159 || x == def2_arg2)
2160 {
2161 gimple_assign_set_rhs_from_tree (gsi, x);
2162 update_stmt (gsi_stmt (*gsi));
2163 return true;
2164 }
2165 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2166 /* (~X | Y) & X -> X & Y */
2167 /* (~X & Y) | X -> X | Y */
2168 if (coden == BIT_NOT_EXPR && a1 == x)
2169 {
2170 gimple_assign_set_rhs_with_ops (gsi, code,
2171 x, def2_arg2);
2172 gcc_assert (gsi_stmt (*gsi) == stmt);
2173 update_stmt (stmt);
2174 return true;
2175 }
2176 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2177 /* (Y | ~X) & X -> X & Y */
2178 /* (Y & ~X) | X -> X | Y */
2179 if (coden == BIT_NOT_EXPR && a1 == x)
2180 {
2181 gimple_assign_set_rhs_with_ops (gsi, code,
2182 x, def2_arg1);
2183 gcc_assert (gsi_stmt (*gsi) == stmt);
2184 update_stmt (stmt);
2185 return true;
2186 }
2187 }
10fbe63d 2188
16bc66ec 2189 /* If arg1 and arg2 are booleans (or any single bit type)
2190 then try to simplify:
2191
2192 (~X & Y) -> X < Y
2193 (X & ~Y) -> Y < X
2194 (~X | Y) -> X <= Y
2195 (X | ~Y) -> Y <= X
2196
2197 But only do this if our result feeds into a comparison as
2198 this transformation is not always a win, particularly on
2199 targets with and-not instructions. */
2200 if (TREE_CODE (arg1) == SSA_NAME
2201 && TREE_CODE (arg2) == SSA_NAME
2202 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2203 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2204 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2205 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2206 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2207 {
2208 use_operand_p use_p;
2209 gimple use_stmt;
2210
2211 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2212 {
2213 if (gimple_code (use_stmt) == GIMPLE_COND
2214 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2215 && integer_zerop (gimple_cond_rhs (use_stmt))
2216 && gimple_cond_code (use_stmt) == NE_EXPR)
2217 {
2218 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2219 return true;
2220 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2221 return true;
2222 }
2223 }
2224 }
2225 }
300da094 2226 return false;
1c4607fd 2227}
2228
ca3c9092 2229
3b8827a2 2230/* Recognize rotation patterns. Return true if a transformation
2231 applied, otherwise return false.
2232
2233 We are looking for X with unsigned type T with bitsize B, OP being
2234 +, | or ^, some type T2 wider than T and
2235 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2236 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2237 (X << Y) OP (X >> (B - Y))
2238 (X << (int) Y) OP (X >> (int) (B - Y))
2239 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2240 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
043ce677 2241 (X << Y) | (X >> ((-Y) & (B - 1)))
2242 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2243 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2244 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
3b8827a2 2245
2246 and transform these into:
2247 X r<< CNT1
2248 X r<< Y
2249
2250 Note, in the patterns with T2 type, the type of OP operands
2251 might even be a signed type, but should have precision B. */
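/* As a concrete (hypothetical) source-level example, assuming a 32-bit
   unsigned int,
     unsigned int
     rotl32 (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> ((-y) & 31));
     }
   matches the (X << Y) | (X >> ((-Y) & (B - 1))) pattern with B == 32 and
   is turned into a single  x r<< y  statement, which can then expand to a
   rotate instruction.  */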
2252
2253static bool
2254simplify_rotate (gimple_stmt_iterator *gsi)
2255{
2256 gimple stmt = gsi_stmt (*gsi);
2257 tree arg[2], rtype, rotcnt = NULL_TREE;
2258 tree def_arg1[2], def_arg2[2];
2259 enum tree_code def_code[2];
2260 tree lhs;
2261 int i;
2262 bool swapped_p = false;
2263 gimple g;
2264
2265 arg[0] = gimple_assign_rhs1 (stmt);
2266 arg[1] = gimple_assign_rhs2 (stmt);
2267 rtype = TREE_TYPE (arg[0]);
2268
2269 /* Only create rotates in complete modes. Other cases are not
2270 expanded properly. */
2271 if (!INTEGRAL_TYPE_P (rtype)
2272 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2273 return false;
2274
2275 for (i = 0; i < 2; i++)
2276 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2277
2278 /* Look through narrowing conversions. */
2279 if (CONVERT_EXPR_CODE_P (def_code[0])
2280 && CONVERT_EXPR_CODE_P (def_code[1])
2281 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2282 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2283 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2284 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2285 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2286 && has_single_use (arg[0])
2287 && has_single_use (arg[1]))
2288 {
2289 for (i = 0; i < 2; i++)
2290 {
2291 arg[i] = def_arg1[i];
2292 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2293 }
2294 }
2295
2296 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2297 for (i = 0; i < 2; i++)
2298 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2299 return false;
2300 else if (!has_single_use (arg[i]))
2301 return false;
2302 if (def_code[0] == def_code[1])
2303 return false;
2304
2305 /* If we've looked through narrowing conversions before, look through
2306 widening conversions from unsigned type with the same precision
2307 as rtype here. */
2308 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2309 for (i = 0; i < 2; i++)
2310 {
2311 tree tem;
2312 enum tree_code code;
2313 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2314 if (!CONVERT_EXPR_CODE_P (code)
2315 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2316 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2317 return false;
2318 def_arg1[i] = tem;
2319 }
2320 /* Both shifts have to use the same first operand. */
2321 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2322 return false;
2323 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2324 return false;
2325
2326 /* CNT1 + CNT2 == B case above. */
e913b5cd 2327 if (tree_fits_uhwi_p (def_arg2[0])
2328 && tree_fits_uhwi_p (def_arg2[1])
aa59f000 2329 && tree_to_uhwi (def_arg2[0])
e913b5cd 2330 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
3b8827a2 2331 rotcnt = def_arg2[0];
2332 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2333 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2334 return false;
2335 else
2336 {
2337 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2338 enum tree_code cdef_code[2];
2339 /* Look through conversion of the shift count argument.
2340 The C/C++ FE casts any shift count argument to integer_type_node.
2341 The only problem might be if the shift count type's maximum value
2342 is equal to or smaller than the number of bits in rtype. */
2343 for (i = 0; i < 2; i++)
2344 {
2345 def_arg2_alt[i] = def_arg2[i];
2346 defcodefor_name (def_arg2[i], &cdef_code[i],
2347 &cdef_arg1[i], &cdef_arg2[i]);
2348 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2349 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2350 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2351 > floor_log2 (TYPE_PRECISION (rtype))
2352 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2353 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2354 {
2355 def_arg2_alt[i] = cdef_arg1[i];
2356 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2357 &cdef_arg1[i], &cdef_arg2[i]);
2358 }
2359 }
2360 for (i = 0; i < 2; i++)
2361 /* Check for one shift count being Y and the other B - Y,
2362 with optional casts. */
2363 if (cdef_code[i] == MINUS_EXPR
e913b5cd 2364 && tree_fits_shwi_p (cdef_arg1[i])
2365 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
3b8827a2 2366 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2367 {
2368 tree tem;
2369 enum tree_code code;
2370
2371 if (cdef_arg2[i] == def_arg2[1 - i]
2372 || cdef_arg2[i] == def_arg2_alt[1 - i])
2373 {
2374 rotcnt = cdef_arg2[i];
2375 break;
2376 }
2377 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2378 if (CONVERT_EXPR_CODE_P (code)
2379 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2380 && TYPE_PRECISION (TREE_TYPE (tem))
2381 > floor_log2 (TYPE_PRECISION (rtype))
2382 && TYPE_PRECISION (TREE_TYPE (tem))
2383 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2384 && (tem == def_arg2[1 - i]
2385 || tem == def_arg2_alt[1 - i]))
2386 {
2387 rotcnt = tem;
2388 break;
2389 }
2390 }
2391 /* The above sequence isn't safe for Y being 0,
2392 because then one of the shifts triggers undefined behavior.
2393 This alternative is safe even for rotation count of 0.
2394 One shift count is Y and the other (-Y) & (B - 1). */
2395 else if (cdef_code[i] == BIT_AND_EXPR
e913b5cd 2396 && tree_fits_shwi_p (cdef_arg2[i])
2397 && tree_to_shwi (cdef_arg2[i])
3b8827a2 2398 == TYPE_PRECISION (rtype) - 1
043ce677 2399 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2400 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
3b8827a2 2401 {
2402 tree tem;
2403 enum tree_code code;
2404
2405 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2406 if (CONVERT_EXPR_CODE_P (code)
2407 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2408 && TYPE_PRECISION (TREE_TYPE (tem))
2409 > floor_log2 (TYPE_PRECISION (rtype))
2410 && TYPE_PRECISION (TREE_TYPE (tem))
2411 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2412 defcodefor_name (tem, &code, &tem, NULL);
2413
2414 if (code == NEGATE_EXPR)
2415 {
2416 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2417 {
2418 rotcnt = tem;
2419 break;
2420 }
2421 defcodefor_name (tem, &code, &tem, NULL);
2422 if (CONVERT_EXPR_CODE_P (code)
2423 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2424 && TYPE_PRECISION (TREE_TYPE (tem))
2425 > floor_log2 (TYPE_PRECISION (rtype))
2426 && TYPE_PRECISION (TREE_TYPE (tem))
2427 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2428 && (tem == def_arg2[1 - i]
2429 || tem == def_arg2_alt[1 - i]))
2430 {
2431 rotcnt = tem;
2432 break;
2433 }
2434 }
2435 }
2436 if (rotcnt == NULL_TREE)
2437 return false;
2438 swapped_p = i != 1;
2439 }
2440
2441 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2442 TREE_TYPE (rotcnt)))
2443 {
2444 g = gimple_build_assign_with_ops (NOP_EXPR,
2445 make_ssa_name (TREE_TYPE (def_arg2[0]),
2446 NULL),
2447 rotcnt, NULL_TREE);
2448 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2449 rotcnt = gimple_assign_lhs (g);
2450 }
2451 lhs = gimple_assign_lhs (stmt);
2452 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2453 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2454 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2455 ? LROTATE_EXPR : RROTATE_EXPR,
2456 lhs, def_arg1[0], rotcnt);
2457 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2458 {
2459 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2460 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2461 lhs, NULL_TREE);
2462 }
2463 gsi_replace (gsi, g, false);
2464 return true;
2465}
2466
ca3c9092 2467/* Perform re-associations of the plus or minus statement STMT that are
b69d1cb6 2468 always permitted. Returns true if the CFG was changed. */
ca3c9092 2469
b69d1cb6 2470static bool
50aacf4c 2471associate_plusminus (gimple_stmt_iterator *gsi)
ca3c9092 2472{
50aacf4c 2473 gimple stmt = gsi_stmt (*gsi);
ca3c9092 2474 tree rhs1 = gimple_assign_rhs1 (stmt);
2475 tree rhs2 = gimple_assign_rhs2 (stmt);
2476 enum tree_code code = gimple_assign_rhs_code (stmt);
ca3c9092 2477 bool changed;
2478
2479 /* We can't reassociate at all for saturating types. */
2480 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
b69d1cb6 2481 return false;
ca3c9092 2482
2483 /* First contract negates. */
2484 do
2485 {
2486 changed = false;
2487
2488 /* A +- (-B) -> A -+ B. */
2489 if (TREE_CODE (rhs2) == SSA_NAME)
2490 {
2491 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2492 if (is_gimple_assign (def_stmt)
32cdcc42 2493 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2494 && can_propagate_from (def_stmt))
ca3c9092 2495 {
2496 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2497 gimple_assign_set_rhs_code (stmt, code);
2498 rhs2 = gimple_assign_rhs1 (def_stmt);
2499 gimple_assign_set_rhs2 (stmt, rhs2);
2500 gimple_set_modified (stmt, true);
2501 changed = true;
2502 }
2503 }
2504
2505 /* (-A) + B -> B - A. */
2506 if (TREE_CODE (rhs1) == SSA_NAME
2507 && code == PLUS_EXPR)
2508 {
2509 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2510 if (is_gimple_assign (def_stmt)
32cdcc42 2511 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2512 && can_propagate_from (def_stmt))
ca3c9092 2513 {
2514 code = MINUS_EXPR;
2515 gimple_assign_set_rhs_code (stmt, code);
2516 rhs1 = rhs2;
2517 gimple_assign_set_rhs1 (stmt, rhs1);
2518 rhs2 = gimple_assign_rhs1 (def_stmt);
2519 gimple_assign_set_rhs2 (stmt, rhs2);
2520 gimple_set_modified (stmt, true);
2521 changed = true;
2522 }
2523 }
2524 }
2525 while (changed);
2526
2527 /* We can't reassociate floating-point or fixed-point plus or minus
2528 because of saturation to +-Inf. */
2529 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2530 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2531 goto out;
2532
2533 /* Second match patterns that allow contracting a plus-minus pair
2534 irrespective of overflow issues.
2535
2536 (A +- B) - A -> +- B
2537 (A +- B) -+ B -> A
2538 (CST +- A) +- CST -> CST +- A
2c83a45e 2539 (A +- CST) +- CST -> A +- CST
ca3c9092 2540 ~A + A -> -1
2541 ~A + 1 -> -A
2542 A - (A +- B) -> -+ B
2543 A +- (B +- A) -> +- B
2544 CST +- (CST +- A) -> CST +- A
2545 CST +- (A +- CST) -> CST +- A
2546 A + ~A -> -1
c223850c 2547 (T)(P + A) - (T)P -> (T)A
ca3c9092 2548
2549 via commuting the addition and contracting operations to zero
2550 by reassociation. */
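/* For instance (hypothetical GIMPLE), the (A +- B) - A case:
     t_1 = a_2 + b_3;
     r_4 = t_1 - a_2;      becomes      r_4 = b_3;
   which is safe irrespective of overflow, since dropping the pair cannot
   introduce an overflow that the original computation did not have.  */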
2551
ca3c9092 2552 if (TREE_CODE (rhs1) == SSA_NAME)
2553 {
2554 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
32cdcc42 2555 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2556 {
2557 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2558 if (def_code == PLUS_EXPR
2559 || def_code == MINUS_EXPR)
2560 {
2561 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2562 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2563 if (operand_equal_p (def_rhs1, rhs2, 0)
2564 && code == MINUS_EXPR)
2565 {
2566 /* (A +- B) - A -> +- B. */
2567 code = ((def_code == PLUS_EXPR)
2568 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2569 rhs1 = def_rhs2;
2570 rhs2 = NULL_TREE;
50aacf4c 2571 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2572 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2573 gimple_set_modified (stmt, true);
2574 }
2575 else if (operand_equal_p (def_rhs2, rhs2, 0)
2576 && code != def_code)
2577 {
2578 /* (A +- B) -+ B -> A. */
2579 code = TREE_CODE (def_rhs1);
2580 rhs1 = def_rhs1;
2581 rhs2 = NULL_TREE;
50aacf4c 2582 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2583 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2584 gimple_set_modified (stmt, true);
2585 }
2c83a45e 2586 else if (CONSTANT_CLASS_P (rhs2)
2587 && CONSTANT_CLASS_P (def_rhs1))
ca3c9092 2588 {
2589 /* (CST +- A) +- CST -> CST +- A. */
2590 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2591 def_rhs1, rhs2);
2592 if (cst && !TREE_OVERFLOW (cst))
2593 {
2594 code = def_code;
2595 gimple_assign_set_rhs_code (stmt, code);
2596 rhs1 = cst;
2597 gimple_assign_set_rhs1 (stmt, rhs1);
2598 rhs2 = def_rhs2;
2599 gimple_assign_set_rhs2 (stmt, rhs2);
2600 gimple_set_modified (stmt, true);
2601 }
2602 }
2c83a45e 2603 else if (CONSTANT_CLASS_P (rhs2)
2604 && CONSTANT_CLASS_P (def_rhs2))
ca3c9092 2605 {
2c83a45e 2606 /* (A +- CST) +- CST -> A +- CST. */
2607 enum tree_code mix = (code == def_code)
2608 ? PLUS_EXPR : MINUS_EXPR;
2609 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
ca3c9092 2610 def_rhs2, rhs2);
2611 if (cst && !TREE_OVERFLOW (cst))
2612 {
2c83a45e 2613 code = def_code;
ca3c9092 2614 gimple_assign_set_rhs_code (stmt, code);
2615 rhs1 = def_rhs1;
2616 gimple_assign_set_rhs1 (stmt, rhs1);
2617 rhs2 = cst;
2618 gimple_assign_set_rhs2 (stmt, rhs2);
2619 gimple_set_modified (stmt, true);
2620 }
2621 }
2622 }
2c83a45e 2623 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
ca3c9092 2624 {
2625 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2c83a45e 2626 if (operand_equal_p (def_rhs1, rhs2, 0))
ca3c9092 2627 {
2628 /* ~A + A -> -1. */
2c83a45e 2629 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
ca3c9092 2630 rhs2 = NULL_TREE;
2c83a45e 2631 code = TREE_CODE (rhs1);
50aacf4c 2632 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2633 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2634 gimple_set_modified (stmt, true);
2635 }
2c83a45e 2636 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2637 && integer_onep (rhs2))
2638 || (TREE_CODE (rhs2) == COMPLEX_CST
2639 && integer_onep (TREE_REALPART (rhs2))
2640 && integer_onep (TREE_IMAGPART (rhs2))))
ca3c9092 2641 {
2642 /* ~A + 1 -> -A. */
2643 code = NEGATE_EXPR;
2644 rhs1 = def_rhs1;
2645 rhs2 = NULL_TREE;
50aacf4c 2646 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2647 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2648 gimple_set_modified (stmt, true);
2649 }
2650 }
f0365515 2651 else if (code == MINUS_EXPR
2652 && CONVERT_EXPR_CODE_P (def_code)
2653 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
c223850c 2654 && TREE_CODE (rhs2) == SSA_NAME)
2655 {
f0365515 2656 /* (T)(P + A) - (T)P -> (T)A. */
c223850c 2657 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs2);
f0365515 2658 if (is_gimple_assign (def_stmt2)
c223850c 2659 && can_propagate_from (def_stmt2)
2660 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt2))
2661 && TREE_CODE (gimple_assign_rhs1 (def_stmt2)) == SSA_NAME)
2662 {
f0365515 2663 /* Now we have (T)X - (T)P. */
2664 tree p = gimple_assign_rhs1 (def_stmt2);
c223850c 2665 def_stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
2666 if (is_gimple_assign (def_stmt2)
f0365515 2667 && can_propagate_from (def_stmt2)
2668 && (gimple_assign_rhs_code (def_stmt2) == POINTER_PLUS_EXPR
2669 || gimple_assign_rhs_code (def_stmt2) == PLUS_EXPR)
2670 && gimple_assign_rhs1 (def_stmt2) == p)
c223850c 2671 {
f0365515 2672 /* And finally (T)(P + A) - (T)P. */
2673 tree a = gimple_assign_rhs2 (def_stmt2);
8f79c655 2674 if (TYPE_PRECISION (TREE_TYPE (rhs1))
2675 <= TYPE_PRECISION (TREE_TYPE (a))
2676 /* For integer types, if A has a smaller type
2677 than T the result depends on the possible
2678 overflow in P + A.
2679 E.g. T=size_t, A=(unsigned)4294967295, P>0.
2680 However, if an overflow in P + A would cause
2681 undefined behavior, we can assume that there
2682 is no overflow. */
2683 || (INTEGRAL_TYPE_P (TREE_TYPE (p))
2684 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (p)))
2685 /* For pointer types, if the conversion of A to the
2686 final type requires a sign- or zero-extension,
2687 then we have to punt - it is not defined which
2688 one is correct. */
2689 || (POINTER_TYPE_P (TREE_TYPE (p))
2690 && TREE_CODE (a) == INTEGER_CST
f0365515 2691 && tree_int_cst_sign_bit (a) == 0))
c223850c 2692 {
8f79c655 2693 if (issue_strict_overflow_warning
2694 (WARN_STRICT_OVERFLOW_MISC)
2695 && TYPE_PRECISION (TREE_TYPE (rhs1))
2696 > TYPE_PRECISION (TREE_TYPE (a))
2697 && INTEGRAL_TYPE_P (TREE_TYPE (p)))
2698 warning_at (gimple_location (stmt),
2699 OPT_Wstrict_overflow,
2700 "assuming signed overflow does not "
2701 "occur when assuming that "
2702 "(T)(P + A) - (T)P is always (T)A");
c223850c 2703 if (useless_type_conversion_p (TREE_TYPE (rhs1),
f0365515 2704 TREE_TYPE (a)))
2705 code = TREE_CODE (a);
c223850c 2706 else
f0365515 2707 code = NOP_EXPR;
2708 rhs1 = a;
c223850c 2709 rhs2 = NULL_TREE;
2710 gimple_assign_set_rhs_with_ops (gsi, code, rhs1,
f0365515 2711 rhs2);
c223850c 2712 gcc_assert (gsi_stmt (*gsi) == stmt);
2713 gimple_set_modified (stmt, true);
2714 }
2715 }
2716 }
2717 }
ca3c9092 2718 }
2719 }
2720
2721 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2722 {
2723 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
32cdcc42 2724 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2725 {
2726 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2727 if (def_code == PLUS_EXPR
2728 || def_code == MINUS_EXPR)
2729 {
2730 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2731 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2732 if (operand_equal_p (def_rhs1, rhs1, 0)
2733 && code == MINUS_EXPR)
2734 {
2735 /* A - (A +- B) -> -+ B. */
2736 code = ((def_code == PLUS_EXPR)
2737 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2738 rhs1 = def_rhs2;
2739 rhs2 = NULL_TREE;
50aacf4c 2740 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2741 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2742 gimple_set_modified (stmt, true);
2743 }
2744 else if (operand_equal_p (def_rhs2, rhs1, 0)
2745 && code != def_code)
2746 {
2747 /* A +- (B +- A) -> +- B. */
2748 code = ((code == PLUS_EXPR)
2749 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2750 rhs1 = def_rhs1;
2751 rhs2 = NULL_TREE;
50aacf4c 2752 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2753 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2754 gimple_set_modified (stmt, true);
2755 }
2c83a45e 2756 else if (CONSTANT_CLASS_P (rhs1)
2757 && CONSTANT_CLASS_P (def_rhs1))
ca3c9092 2758 {
2759 /* CST +- (CST +- A) -> CST +- A. */
2760 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2761 rhs1, def_rhs1);
2762 if (cst && !TREE_OVERFLOW (cst))
2763 {
2764 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2765 gimple_assign_set_rhs_code (stmt, code);
2766 rhs1 = cst;
2767 gimple_assign_set_rhs1 (stmt, rhs1);
2768 rhs2 = def_rhs2;
2769 gimple_assign_set_rhs2 (stmt, rhs2);
2770 gimple_set_modified (stmt, true);
2771 }
2772 }
2c83a45e 2773 else if (CONSTANT_CLASS_P (rhs1)
2774 && CONSTANT_CLASS_P (def_rhs2))
ca3c9092 2775 {
2776 /* CST +- (A +- CST) -> CST +- A. */
2777 tree cst = fold_binary (def_code == code
2778 ? PLUS_EXPR : MINUS_EXPR,
2779 TREE_TYPE (rhs2),
2780 rhs1, def_rhs2);
2781 if (cst && !TREE_OVERFLOW (cst))
2782 {
2783 rhs1 = cst;
2784 gimple_assign_set_rhs1 (stmt, rhs1);
2785 rhs2 = def_rhs1;
2786 gimple_assign_set_rhs2 (stmt, rhs2);
2787 gimple_set_modified (stmt, true);
2788 }
2789 }
2790 }
2c83a45e 2791 else if (def_code == BIT_NOT_EXPR)
ca3c9092 2792 {
2793 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2794 if (code == PLUS_EXPR
2795 && operand_equal_p (def_rhs1, rhs1, 0))
2796 {
2797 /* A + ~A -> -1. */
2c83a45e 2798 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
ca3c9092 2799 rhs2 = NULL_TREE;
2c83a45e 2800 code = TREE_CODE (rhs1);
50aacf4c 2801 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2802 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2803 gimple_set_modified (stmt, true);
2804 }
2805 }
2806 }
2807 }
2808
2809out:
2810 if (gimple_modified_p (stmt))
2811 {
50aacf4c 2812 fold_stmt_inplace (gsi);
ca3c9092 2813 update_stmt (stmt);
5a423a75 2814 return true;
ca3c9092 2815 }
b69d1cb6 2816
2817 return false;
ca3c9092 2818}
2819
c9c17332 2820/* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2821 true if anything changed, false otherwise. */
2822
2823static bool
b904104c 2824associate_pointerplus_align (gimple_stmt_iterator *gsi)
c9c17332 2825{
2826 gimple stmt = gsi_stmt (*gsi);
2827 gimple def_stmt;
2828 tree ptr, rhs, algn;
2829
2830 /* Pattern match
2831 tem = (sizetype) ptr;
2832 tem = tem & algn;
2833 tem = -tem;
2834 ... = ptr p+ tem;
2835 and produce the simpler form, easier to analyze with respect to alignment,
2836 ... = ptr & ~algn; */
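/* In source terms (hypothetical), this corresponds to align-down code like
     q = p - ((uintptr_t) p & (align - 1));
   with a constant alignment, which becomes the equivalent of
     q = (char *) ((uintptr_t) p & ~(uintptr_t) (align - 1));
   whose alignment later passes can see directly.  */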
2837 ptr = gimple_assign_rhs1 (stmt);
2838 rhs = gimple_assign_rhs2 (stmt);
2839 if (TREE_CODE (rhs) != SSA_NAME)
2840 return false;
2841 def_stmt = SSA_NAME_DEF_STMT (rhs);
2842 if (!is_gimple_assign (def_stmt)
2843 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2844 return false;
2845 rhs = gimple_assign_rhs1 (def_stmt);
2846 if (TREE_CODE (rhs) != SSA_NAME)
2847 return false;
2848 def_stmt = SSA_NAME_DEF_STMT (rhs);
2849 if (!is_gimple_assign (def_stmt)
2850 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2851 return false;
2852 rhs = gimple_assign_rhs1 (def_stmt);
2853 algn = gimple_assign_rhs2 (def_stmt);
2854 if (TREE_CODE (rhs) != SSA_NAME
2855 || TREE_CODE (algn) != INTEGER_CST)
2856 return false;
2857 def_stmt = SSA_NAME_DEF_STMT (rhs);
2858 if (!is_gimple_assign (def_stmt)
2859 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2860 return false;
2861 if (gimple_assign_rhs1 (def_stmt) != ptr)
2862 return false;
2863
6da74b21 2864 algn = wide_int_to_tree (TREE_TYPE (ptr), wi::bit_not (algn));
c9c17332 2865 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2866 fold_stmt_inplace (gsi);
2867 update_stmt (stmt);
2868
2869 return true;
2870}
2871
b904104c 2872/* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2873 true if anything changed, false otherwise. */
2874
2875static bool
2876associate_pointerplus_diff (gimple_stmt_iterator *gsi)
2877{
2878 gimple stmt = gsi_stmt (*gsi);
2879 gimple def_stmt;
2880 tree ptr1, rhs;
2881
2882 /* Pattern match
2883 tem1 = (long) ptr1;
2884 tem2 = (long) ptr2;
2885 tem3 = tem2 - tem1;
2886 tem4 = (unsigned long) tem3;
2887 tem5 = ptr1 + tem4;
2888 and produce
2889 tem5 = ptr2; */
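/* In source terms (hypothetical), assuming ptr1 and ptr2 have the same
   pointer type and long is as wide as a pointer, this matches
     q = ptr1 + (size_t) ((long) ptr2 - (long) ptr1);
   which is simply  q = ptr2;  the checks below make sure the intermediate
   conversions cannot change the value.  */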
2890 ptr1 = gimple_assign_rhs1 (stmt);
2891 rhs = gimple_assign_rhs2 (stmt);
2892 if (TREE_CODE (rhs) != SSA_NAME)
2893 return false;
2894 gimple minus = SSA_NAME_DEF_STMT (rhs);
2895 /* Conditionally look through a sign-changing conversion. */
2896 if (is_gimple_assign (minus)
2897 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (minus))
2898 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (minus)))
2899 == TYPE_PRECISION (TREE_TYPE (rhs)))
2900 && TREE_CODE (gimple_assign_rhs1 (minus)) == SSA_NAME)
2901 minus = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (minus));
2902 if (!is_gimple_assign (minus))
2903 return false;
2904 if (gimple_assign_rhs_code (minus) != MINUS_EXPR)
2905 return false;
2906 rhs = gimple_assign_rhs2 (minus);
2907 if (TREE_CODE (rhs) != SSA_NAME)
2908 return false;
2909 def_stmt = SSA_NAME_DEF_STMT (rhs);
2910 if (!is_gimple_assign (def_stmt)
2911 || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
2912 || gimple_assign_rhs1 (def_stmt) != ptr1)
2913 return false;
2914 rhs = gimple_assign_rhs1 (minus);
2915 if (TREE_CODE (rhs) != SSA_NAME)
2916 return false;
2917 def_stmt = SSA_NAME_DEF_STMT (rhs);
2918 if (!is_gimple_assign (def_stmt)
2919 || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2920 return false;
2921 rhs = gimple_assign_rhs1 (def_stmt);
2922 if (! useless_type_conversion_p (TREE_TYPE (ptr1), TREE_TYPE (rhs)))
2923 return false;
2924
2925 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (rhs), rhs, NULL_TREE);
2926 update_stmt (stmt);
2927
2928 return true;
2929}
2930
2931 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2932 true if anything changed, false otherwise. */
2933
2934static bool
2935associate_pointerplus (gimple_stmt_iterator *gsi)
2936{
2937 gimple stmt = gsi_stmt (*gsi);
2938 gimple def_stmt;
2939 tree ptr, off1, off2;
2940
2941 if (associate_pointerplus_align (gsi)
2942 || associate_pointerplus_diff (gsi))
2943 return true;
2944
2945 /* Associate (p +p off1) +p off2 as (p +p (off1 + off2)). */
2946 ptr = gimple_assign_rhs1 (stmt);
2947 off1 = gimple_assign_rhs2 (stmt);
135b982d 2948 if (TREE_CODE (ptr) != SSA_NAME
2949 || !has_single_use (ptr))
b904104c 2950 return false;
2951 def_stmt = SSA_NAME_DEF_STMT (ptr);
2952 if (!is_gimple_assign (def_stmt)
135b982d 2953 || gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
2954 || !can_propagate_from (def_stmt))
b904104c 2955 return false;
2956 ptr = gimple_assign_rhs1 (def_stmt);
2957 off2 = gimple_assign_rhs2 (def_stmt);
2958 if (!types_compatible_p (TREE_TYPE (off1), TREE_TYPE (off2)))
2959 return false;
2960
2961 tree off = make_ssa_name (TREE_TYPE (off1), NULL);
2962 gimple ostmt = gimple_build_assign_with_ops (PLUS_EXPR, off, off1, off2);
2963 gsi_insert_before (gsi, ostmt, GSI_SAME_STMT);
2964
2965 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR, ptr, off);
2966 update_stmt (stmt);
2967
2968 return true;
2969}
2970
6afd0544 2971/* Combine two conversions in a row for the second conversion at *GSI.
89c8f35a 2972 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2973 run. Else it returns 0. */
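/* For example (hypothetical), the double conversion in
     unsigned char c = ...;
     long l = (long) (int) c;
   is a sign-extension of a zero-extended value and is replaced by the
   single zero-extension  l = (long) c.  */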
6afd0544 2974
89c8f35a 2975static int
6afd0544 2976combine_conversions (gimple_stmt_iterator *gsi)
2977{
2978 gimple stmt = gsi_stmt (*gsi);
2979 gimple def_stmt;
2980 tree op0, lhs;
2981 enum tree_code code = gimple_assign_rhs_code (stmt);
487282d5 2982 enum tree_code code2;
6afd0544 2983
2984 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2985 || code == FLOAT_EXPR
2986 || code == FIX_TRUNC_EXPR);
2987
2988 lhs = gimple_assign_lhs (stmt);
2989 op0 = gimple_assign_rhs1 (stmt);
2990 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2991 {
2992 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
89c8f35a 2993 return 1;
6afd0544 2994 }
2995
2996 if (TREE_CODE (op0) != SSA_NAME)
89c8f35a 2997 return 0;
6afd0544 2998
2999 def_stmt = SSA_NAME_DEF_STMT (op0);
3000 if (!is_gimple_assign (def_stmt))
89c8f35a 3001 return 0;
6afd0544 3002
487282d5 3003 code2 = gimple_assign_rhs_code (def_stmt);
3004
3005 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
6afd0544 3006 {
3007 tree defop0 = gimple_assign_rhs1 (def_stmt);
3008 tree type = TREE_TYPE (lhs);
3009 tree inside_type = TREE_TYPE (defop0);
3010 tree inter_type = TREE_TYPE (op0);
3011 int inside_int = INTEGRAL_TYPE_P (inside_type);
3012 int inside_ptr = POINTER_TYPE_P (inside_type);
3013 int inside_float = FLOAT_TYPE_P (inside_type);
3014 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
3015 unsigned int inside_prec = TYPE_PRECISION (inside_type);
3016 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
3017 int inter_int = INTEGRAL_TYPE_P (inter_type);
3018 int inter_ptr = POINTER_TYPE_P (inter_type);
3019 int inter_float = FLOAT_TYPE_P (inter_type);
3020 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
3021 unsigned int inter_prec = TYPE_PRECISION (inter_type);
3022 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
3023 int final_int = INTEGRAL_TYPE_P (type);
3024 int final_ptr = POINTER_TYPE_P (type);
3025 int final_float = FLOAT_TYPE_P (type);
3026 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
3027 unsigned int final_prec = TYPE_PRECISION (type);
3028 int final_unsignedp = TYPE_UNSIGNED (type);
3029
3aeff048 3030 /* Don't propagate ssa names that occur in abnormal phis. */
3031 if (TREE_CODE (defop0) == SSA_NAME
3032 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
3033 return 0;
3034
6afd0544 3035 /* In addition to the cases of two conversions in a row
3036 handled below, if we are converting something to its own
3037 type via an object of identical or wider precision, neither
3038 conversion is needed. */
3039 if (useless_type_conversion_p (type, inside_type)
3040 && (((inter_int || inter_ptr) && final_int)
3041 || (inter_float && final_float))
3042 && inter_prec >= final_prec)
3043 {
3044 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
3045 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
3046 update_stmt (stmt);
89c8f35a 3047 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 3048 }
3049
3050 /* Likewise, if the intermediate and initial types are either both
3051 float or both integer, we don't need the middle conversion if the
3052 former is wider than the latter and doesn't change the signedness
3053 (for integers). Avoid this if the final type is a pointer since
3054 then we sometimes need the middle conversion. Likewise if the
3055 final type has a precision not equal to the size of its mode. */
3056 if (((inter_int && inside_int)
3057 || (inter_float && inside_float)
3058 || (inter_vec && inside_vec))
3059 && inter_prec >= inside_prec
3060 && (inter_float || inter_vec
3061 || inter_unsignedp == inside_unsignedp)
51dbf409 3062 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
6afd0544 3063 && TYPE_MODE (type) == TYPE_MODE (inter_type))
3064 && ! final_ptr
3065 && (! final_vec || inter_prec == inside_prec))
3066 {
3067 gimple_assign_set_rhs1 (stmt, defop0);
3068 update_stmt (stmt);
89c8f35a 3069 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 3070 }
3071
3072 /* If we have a sign-extension of a zero-extended value, we can
a6476f88 3073 replace that by a single zero-extension. Likewise if the
3074 final conversion does not change precision we can drop the
3075 intermediate conversion. */
6afd0544 3076 if (inside_int && inter_int && final_int
a6476f88 3077 && ((inside_prec < inter_prec && inter_prec < final_prec
3078 && inside_unsignedp && !inter_unsignedp)
3079 || final_prec == inter_prec))
6afd0544 3080 {
3081 gimple_assign_set_rhs1 (stmt, defop0);
3082 update_stmt (stmt);
89c8f35a 3083 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 3084 }
3085
3086 /* Two conversions in a row are not needed unless:
3087 - some conversion is floating-point (overstrict for now), or
3088 - some conversion is a vector (overstrict for now), or
3089 - the intermediate type is narrower than both initial and
3090 final, or
3091 - the intermediate type and innermost type differ in signedness,
3092 and the outermost type is wider than the intermediate, or
3093 - the initial type is a pointer type and the precisions of the
3094 intermediate and final types differ, or
3095 - the final type is a pointer type and the precisions of the
3096 initial and intermediate types differ. */
3097 if (! inside_float && ! inter_float && ! final_float
3098 && ! inside_vec && ! inter_vec && ! final_vec
3099 && (inter_prec >= inside_prec || inter_prec >= final_prec)
3100 && ! (inside_int && inter_int
3101 && inter_unsignedp != inside_unsignedp
3102 && inter_prec < final_prec)
3103 && ((inter_unsignedp && inter_prec > inside_prec)
3104 == (final_unsignedp && final_prec > inter_prec))
3105 && ! (inside_ptr && inter_prec != final_prec)
3106 && ! (final_ptr && inside_prec != inter_prec)
51dbf409 3107 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
6afd0544 3108 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
3109 {
3110 gimple_assign_set_rhs1 (stmt, defop0);
3111 update_stmt (stmt);
89c8f35a 3112 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 3113 }
3114
3115 /* A truncation to an unsigned type should be canonicalized as
3116 a bitwise AND with a mask. */
3117 if (final_int && inter_int && inside_int
3118 && final_prec == inside_prec
3119 && final_prec > inter_prec
3120 && inter_unsignedp)
3121 {
3122 tree tem;
3123 tem = fold_build2 (BIT_AND_EXPR, inside_type,
3124 defop0,
e913b5cd 3125 wide_int_to_tree
796b6678 3126 (inside_type,
3127 wi::mask (inter_prec, false,
3128 TYPE_PRECISION (inside_type))));
6afd0544 3129 if (!useless_type_conversion_p (type, inside_type))
3130 {
3131 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
3132 GSI_SAME_STMT);
3133 gimple_assign_set_rhs1 (stmt, tem);
3134 }
3135 else
3136 gimple_assign_set_rhs_from_tree (gsi, tem);
3137 update_stmt (gsi_stmt (*gsi));
89c8f35a 3138 return 1;
6afd0544 3139 }
487282d5 3140
3141 /* If we are converting an integer to a floating-point that can
3142 represent it exactly and back to an integer, we can skip the
3143 floating-point conversion. */
3144 if (inside_int && inter_float && final_int &&
3145 (unsigned) significand_size (TYPE_MODE (inter_type))
3146 >= inside_prec - !inside_unsignedp)
3147 {
3148 if (useless_type_conversion_p (type, inside_type))
3149 {
3150 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
3151 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
3152 update_stmt (stmt);
3153 return remove_prop_source_from_use (op0) ? 2 : 1;
3154 }
3155 else
3156 {
3157 gimple_assign_set_rhs1 (stmt, defop0);
3158 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
3159 update_stmt (stmt);
3160 return remove_prop_source_from_use (op0) ? 2 : 1;
3161 }
3162 }
6afd0544 3163 }
3164
89c8f35a 3165 return 0;
6afd0544 3166}
3167
173c91d9 3168/* Combine an element access with a shuffle. Returns true if there were
3169 any changes made, else it returns false. */
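/* For example (hypothetical), assuming four-element vectors of 32-bit
   elements, extracting lane 0 of a shuffle result
     v_2 = VEC_PERM_EXPR <a_1, b_1, { 3, 0, 1, 2 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 0>;
   is rewritten to read the selected element directly:
     x_3 = BIT_FIELD_REF <a_1, 32, 96>;  */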
3170
3171static bool
3172simplify_bitfield_ref (gimple_stmt_iterator *gsi)
3173{
3174 gimple stmt = gsi_stmt (*gsi);
3175 gimple def_stmt;
3176 tree op, op0, op1, op2;
3177 tree elem_type;
3178 unsigned idx, n, size;
3179 enum tree_code code;
3180
3181 op = gimple_assign_rhs1 (stmt);
3182 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3183
3184 op0 = TREE_OPERAND (op, 0);
3185 if (TREE_CODE (op0) != SSA_NAME
3186 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3187 return false;
3188
58bf5219 3189 def_stmt = get_prop_source_stmt (op0, false, NULL);
3190 if (!def_stmt || !can_propagate_from (def_stmt))
3191 return false;
3192
3193 op1 = TREE_OPERAND (op, 1);
3194 op2 = TREE_OPERAND (op, 2);
3195 code = gimple_assign_rhs_code (def_stmt);
3196
3197 if (code == CONSTRUCTOR)
3198 {
3199 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3200 gimple_assign_rhs1 (def_stmt), op1, op2);
3201 if (!tem || !valid_gimple_rhs_p (tem))
3202 return false;
3203 gimple_assign_set_rhs_from_tree (gsi, tem);
3204 update_stmt (gsi_stmt (*gsi));
3205 return true;
3206 }
3207
173c91d9 3208 elem_type = TREE_TYPE (TREE_TYPE (op0));
3209 if (TREE_TYPE (op) != elem_type)
3210 return false;
3211
f9ae6f95 3212 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3213 n = TREE_INT_CST_LOW (op1) / size;
173c91d9 3214 if (n != 1)
3215 return false;
f9ae6f95 3216 idx = TREE_INT_CST_LOW (op2) / size;
173c91d9 3217
173c91d9 3218 if (code == VEC_PERM_EXPR)
3219 {
3220 tree p, m, index, tem;
3221 unsigned nelts;
3222 m = gimple_assign_rhs3 (def_stmt);
3223 if (TREE_CODE (m) != VECTOR_CST)
3224 return false;
3225 nelts = VECTOR_CST_NELTS (m);
f9ae6f95 3226 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
173c91d9 3227 idx %= 2 * nelts;
3228 if (idx < nelts)
3229 {
3230 p = gimple_assign_rhs1 (def_stmt);
3231 }
3232 else
3233 {
3234 p = gimple_assign_rhs2 (def_stmt);
3235 idx -= nelts;
3236 }
3237 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3238 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
ab54bbbd 3239 unshare_expr (p), op1, index);
173c91d9 3240 gimple_assign_set_rhs1 (stmt, tem);
3241 fold_stmt (gsi);
3242 update_stmt (gsi_stmt (*gsi));
3243 return true;
3244 }
3245
3246 return false;
3247}
3248
496ec2ad 3249/* Determine whether applying the 2 permutations (mask1 then mask2)
3250 gives back one of the input. */
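/* E.g. (mask values invented for illustration): with four-element masks,
   mask1 = { 3, 2, 1, 0 } followed by mask2 = { 3, 2, 1, 0 } composes to the
   identity selection { 0, 1, 2, 3 }, so 1 is returned (the first input
   survives); a composition selecting { 4, 5, 6, 7 } would return 2.  */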
3251
3252static int
3253is_combined_permutation_identity (tree mask1, tree mask2)
3254{
3255 tree mask;
3256 unsigned int nelts, i, j;
3257 bool maybe_identity1 = true;
3258 bool maybe_identity2 = true;
3259
3260 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3261 && TREE_CODE (mask2) == VECTOR_CST);
3262 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3263 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3264
3265 nelts = VECTOR_CST_NELTS (mask);
3266 for (i = 0; i < nelts; i++)
3267 {
3268 tree val = VECTOR_CST_ELT (mask, i);
3269 gcc_assert (TREE_CODE (val) == INTEGER_CST);
f9ae6f95 3270 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
496ec2ad 3271 if (j == i)
3272 maybe_identity2 = false;
3273 else if (j == i + nelts)
3274 maybe_identity1 = false;
3275 else
3276 return 0;
3277 }
3278 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3279}
3280
2b9112d6 3281/* Combine a shuffle with its arguments.  Returns 1 if any change was
3282 made, 2 if cfg-cleanup needs to run, and 0 otherwise. */
496ec2ad 3283
3284static int
3285simplify_permutation (gimple_stmt_iterator *gsi)
3286{
3287 gimple stmt = gsi_stmt (*gsi);
3288 gimple def_stmt;
2b9112d6 3289 tree op0, op1, op2, op3, arg0, arg1;
3290 enum tree_code code;
ab54bbbd 3291 bool single_use_op0 = false;
496ec2ad 3292
2b9112d6 3293 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
496ec2ad 3294
3295 op0 = gimple_assign_rhs1 (stmt);
3296 op1 = gimple_assign_rhs2 (stmt);
3297 op2 = gimple_assign_rhs3 (stmt);
3298
496ec2ad 3299 if (TREE_CODE (op2) != VECTOR_CST)
3300 return 0;
3301
2b9112d6 3302 if (TREE_CODE (op0) == VECTOR_CST)
3303 {
3304 code = VECTOR_CST;
3305 arg0 = op0;
3306 }
3307 else if (TREE_CODE (op0) == SSA_NAME)
3308 {
ab54bbbd 3309 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3310 if (!def_stmt || !can_propagate_from (def_stmt))
2b9112d6 3311 return 0;
496ec2ad 3312
2b9112d6 3313 code = gimple_assign_rhs_code (def_stmt);
3314 arg0 = gimple_assign_rhs1 (def_stmt);
3315 }
3316 else
496ec2ad 3317 return 0;
3318
496ec2ad 3319 /* Two consecutive shuffles. */
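      /* E.g. (an invented sketch):
	   v_2 = VEC_PERM_EXPR <a_1, b_1, { 3, 2, 1, 0 }>;
	   v_3 = VEC_PERM_EXPR <v_2, v_2, { 3, 2, 1, 0 }>;
	 reversing twice is the identity, so v_3 can be replaced by a_1.  */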
2b9112d6 3320 if (code == VEC_PERM_EXPR)
496ec2ad 3321 {
3322 tree orig;
3323 int ident;
2b9112d6 3324
3325 if (op0 != op1)
3326 return 0;
496ec2ad 3327 op3 = gimple_assign_rhs3 (def_stmt);
3328 if (TREE_CODE (op3) != VECTOR_CST)
3329 return 0;
3330 ident = is_combined_permutation_identity (op3, op2);
3331 if (!ident)
3332 return 0;
3333 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3334 : gimple_assign_rhs2 (def_stmt);
3335 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3336 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3337 gimple_set_num_ops (stmt, 2);
3338 update_stmt (stmt);
3339 return remove_prop_source_from_use (op0) ? 2 : 1;
3340 }
3341
2b9112d6 3342 /* Shuffle of a constructor. */
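  /* E.g. (a sketch, names invented; whether it applies depends on
     fold_ternary handling the operands):
       v_1 = { a_2, b_3, c_4, d_5 };
       v_6 = VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>;
     may fold to the reordered constructor
       v_6 = { d_5, c_4, b_3, a_2 };  */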
3343 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3344 {
3345 tree opt;
3346 bool ret = false;
3347 if (op0 != op1)
3348 {
ab54bbbd 3349 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2b9112d6 3350 return 0;
3351
3352 if (TREE_CODE (op1) == VECTOR_CST)
3353 arg1 = op1;
3354 else if (TREE_CODE (op1) == SSA_NAME)
3355 {
3356 enum tree_code code2;
3357
ab54bbbd 3358 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3359 if (!def_stmt2 || !can_propagate_from (def_stmt2))
2b9112d6 3360 return 0;
3361
3362 code2 = gimple_assign_rhs_code (def_stmt2);
3363 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3364 return 0;
3365 arg1 = gimple_assign_rhs1 (def_stmt2);
3366 }
3367 else
3368 return 0;
3369 }
3370 else
3371 {
3372 /* Already used twice in this statement. */
3373 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3374 return 0;
3375 arg1 = arg0;
3376 }
9af5ce0c 3377 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
2b9112d6 3378 if (!opt
9af5ce0c 3379 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
2b9112d6 3380 return 0;
3381 gimple_assign_set_rhs_from_tree (gsi, opt);
3382 update_stmt (gsi_stmt (*gsi));
3383 if (TREE_CODE (op0) == SSA_NAME)
3384 ret = remove_prop_source_from_use (op0);
3385 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3386 ret |= remove_prop_source_from_use (op1);
3387 return ret ? 2 : 1;
3388 }
3389
3390 return 0;
496ec2ad 3391}
3392
6a9e13a2 3393/* Recognize a vector CONSTRUCTOR as a VEC_PERM_EXPR.  Returns true if any change was made. */
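/* E.g. (a sketch with invented names, assuming a vector of four 32-bit
   elements and a target that supports the permutation):
     _1 = BIT_FIELD_REF <v_5, 32, 96>;
     _2 = BIT_FIELD_REF <v_5, 32, 64>;
     _3 = BIT_FIELD_REF <v_5, 32, 32>;
     _4 = BIT_FIELD_REF <v_5, 32, 0>;
     x_6 = { _1, _2, _3, _4 };
   can be recognized as the reversing shuffle
     x_6 = VEC_PERM_EXPR <v_5, v_5, { 3, 2, 1, 0 }>;  */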
3394
3395static bool
3396simplify_vector_constructor (gimple_stmt_iterator *gsi)
3397{
3398 gimple stmt = gsi_stmt (*gsi);
3399 gimple def_stmt;
3400 tree op, op2, orig, type, elem_type;
3401 unsigned elem_size, nelts, i;
3402 enum tree_code code;
3403 constructor_elt *elt;
3404 unsigned char *sel;
3405 bool maybe_ident;
3406
3407 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
3408
3409 op = gimple_assign_rhs1 (stmt);
3410 type = TREE_TYPE (op);
3411 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
3412
3413 nelts = TYPE_VECTOR_SUBPARTS (type);
3414 elem_type = TREE_TYPE (type);
f9ae6f95 3415 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
6a9e13a2 3416
3417 sel = XALLOCAVEC (unsigned char, nelts);
3418 orig = NULL;
3419 maybe_ident = true;
f1f41a6c 3420 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
6a9e13a2 3421 {
3422 tree ref, op1;
3423
3424 if (i >= nelts)
3425 return false;
3426
3427 if (TREE_CODE (elt->value) != SSA_NAME)
3428 return false;
ab54bbbd 3429 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
3430 if (!def_stmt)
6a9e13a2 3431 return false;
3432 code = gimple_assign_rhs_code (def_stmt);
3433 if (code != BIT_FIELD_REF)
3434 return false;
3435 op1 = gimple_assign_rhs1 (def_stmt);
3436 ref = TREE_OPERAND (op1, 0);
3437 if (orig)
3438 {
3439 if (ref != orig)
3440 return false;
3441 }
3442 else
3443 {
3444 if (TREE_CODE (ref) != SSA_NAME)
3445 return false;
8a13ba5e 3446 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
3447 return false;
6a9e13a2 3448 orig = ref;
3449 }
f9ae6f95 3450 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
6a9e13a2 3451 return false;
f9ae6f95 3452 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
6a9e13a2 3453 if (sel[i] != i) maybe_ident = false;
3454 }
3455 if (i < nelts)
3456 return false;
3457
3458 if (maybe_ident)
d1938a4b 3459 gimple_assign_set_rhs_from_tree (gsi, orig);
6a9e13a2 3460 else
3461 {
d1938a4b 3462 tree mask_type, *mask_elts;
3463
3464 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
3465 return false;
3466 mask_type
3467 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3468 nelts);
3469 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3470 || GET_MODE_SIZE (TYPE_MODE (mask_type))
3471 != GET_MODE_SIZE (TYPE_MODE (type)))
6a9e13a2 3472 return false;
d1938a4b 3473 mask_elts = XALLOCAVEC (tree, nelts);
3474 for (i = 0; i < nelts; i++)
3475 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
3476 op2 = build_vector (mask_type, mask_elts);
6a9e13a2 3477 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
3478 }
3479 update_stmt (gsi_stmt (*gsi));
3480 return true;
3481}
3482
5a423a75 3483/* Simplify multiplications.
3484 Return true if a transformation was applied, otherwise return false. */
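/* Currently this handles one pattern (a sketch, names invented):
     t_2 = x_1 /[ex] 4;
     y_3 = t_2 * 4;
   the exact division guarantees no bits were discarded, so y_3 can be
   replaced by x_1, possibly through a conversion when only the sign of
   the type differs.  */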
3485
3486static bool
3487simplify_mult (gimple_stmt_iterator *gsi)
3488{
3489 gimple stmt = gsi_stmt (*gsi);
3490 tree arg1 = gimple_assign_rhs1 (stmt);
3491 tree arg2 = gimple_assign_rhs2 (stmt);
3492
3493 if (TREE_CODE (arg1) != SSA_NAME)
3494 return false;
3495
3496 gimple def_stmt = SSA_NAME_DEF_STMT (arg1);
3497 if (!is_gimple_assign (def_stmt))
3498 return false;
3499
3500 /* Look through a sign-changing conversion. */
3501 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
3502 {
3503 if (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (def_stmt)))
3504 != TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
3505 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
3506 return false;
3507 def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
3508 if (!is_gimple_assign (def_stmt))
3509 return false;
3510 }
3511
3512 if (gimple_assign_rhs_code (def_stmt) == EXACT_DIV_EXPR)
3513 {
3514 if (operand_equal_p (gimple_assign_rhs2 (def_stmt), arg2, 0))
3515 {
3516 tree res = gimple_assign_rhs1 (def_stmt);
3517 if (useless_type_conversion_p (TREE_TYPE (arg1), TREE_TYPE (res)))
3518 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (res), res,
3519 NULL_TREE);
3520 else
3521 gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, res, NULL_TREE);
3522 gcc_assert (gsi_stmt (*gsi) == stmt);
3523 update_stmt (stmt);
3524 return true;
3525 }
3526 }
3527
3528 return false;
3529}
f619ecae 3530
3531
3532/* Const-and-copy lattice for fold_all_stmts. */
3533static vec<tree> lattice;
3534
3535/* Primitive "lattice" function for gimple_simplify. */
3536
3537static tree
3538fwprop_ssa_val (tree name)
3539{
3540 /* First valueize NAME. */
3541 if (TREE_CODE (name) == SSA_NAME
3542 && SSA_NAME_VERSION (name) < lattice.length ())
3543 {
3544 tree val = lattice[SSA_NAME_VERSION (name)];
3545 if (val)
3546 name = val;
3547 }
58810b92 3548 /* We continue matching along SSA use-def edges for SSA names
3549 that are not single-use. Currently there are no patterns
3550 that would cause any issues with that. */
f619ecae 3551 return name;
3552}
3553
3554/* Fold all stmts using fold_stmt following only single-use chains
3555 and using a simple const-and-copy lattice. */
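/* E.g. (illustrative only): after processing
     x_1 = 4;
     y_2 = x_1;
   the lattice maps both x_1 and y_2 to the constant 4, so a later
     z_3 = y_2 + 1;
   can typically be folded to  z_3 = 5;  via the valueization hook.  */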
3556
3557static bool
3558fold_all_stmts (struct function *fun)
3559{
3560 bool cfg_changed = false;
3561
3562 /* Combine stmts with the stmts defining their operands. Do that
3563 in an order that guarantees visiting SSA defs before SSA uses. */
3564 lattice.create (num_ssa_names);
3565 lattice.quick_grow_cleared (num_ssa_names);
3566 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
3567 int postorder_num = inverted_post_order_compute (postorder);
3568 for (int i = 0; i < postorder_num; ++i)
3569 {
3570 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
3571 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3572 !gsi_end_p (gsi); gsi_next (&gsi))
3573 {
3574 gimple stmt = gsi_stmt (gsi);
3575 gimple orig_stmt = stmt;
3576
3577 if (fold_stmt (&gsi, fwprop_ssa_val))
3578 {
3579 stmt = gsi_stmt (gsi);
3580 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt)
3581 && gimple_purge_dead_eh_edges (bb))
3582 cfg_changed = true;
3583 /* Cleanup the CFG if we simplified a condition to
3584 true or false. */
3585 if (gimple_code (stmt) == GIMPLE_COND
3586 && (gimple_cond_true_p (stmt)
3587 || gimple_cond_false_p (stmt)))
3588 cfg_changed = true;
3589 update_stmt (stmt);
3590 }
3591
3592 /* Fill up the lattice. */
3593 if (gimple_assign_single_p (stmt))
3594 {
3595 tree lhs = gimple_assign_lhs (stmt);
3596 tree rhs = gimple_assign_rhs1 (stmt);
3597 if (TREE_CODE (lhs) == SSA_NAME)
3598 {
3599 if (TREE_CODE (rhs) == SSA_NAME)
3600 lattice[SSA_NAME_VERSION (lhs)] = fwprop_ssa_val (rhs);
3601 else if (is_gimple_min_invariant (rhs))
3602 lattice[SSA_NAME_VERSION (lhs)] = rhs;
3603 else
3604 lattice[SSA_NAME_VERSION (lhs)] = lhs;
3605 }
3606 }
3607 }
3608 }
3609 free (postorder);
3610 lattice.release ();
3611
3612 return cfg_changed;
3613}
3614
678b2f5b 3615/* Main entry point for the forward propagation and statement combine
3616 optimizer. */
4ee9c684 3617
65b0537f 3618namespace {
3619
3620const pass_data pass_data_forwprop =
3621{
3622 GIMPLE_PASS, /* type */
3623 "forwprop", /* name */
3624 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 3625 TV_TREE_FORWPROP, /* tv_id */
3626 ( PROP_cfg | PROP_ssa ), /* properties_required */
3627 0, /* properties_provided */
3628 0, /* properties_destroyed */
3629 0, /* todo_flags_start */
8b88439e 3630 TODO_update_ssa, /* todo_flags_finish */
65b0537f 3631};
3632
3633class pass_forwprop : public gimple_opt_pass
3634{
3635public:
3636 pass_forwprop (gcc::context *ctxt)
3637 : gimple_opt_pass (pass_data_forwprop, ctxt)
3638 {}
3639
3640 /* opt_pass methods: */
3641 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
3642 virtual bool gate (function *) { return flag_tree_forwprop; }
3643 virtual unsigned int execute (function *);
3644
3645}; // class pass_forwprop
3646
3647unsigned int
3648pass_forwprop::execute (function *fun)
4ee9c684 3649{
f5c8cff5 3650 basic_block bb;
c96420f8 3651 unsigned int todoflags = 0;
4ee9c684 3652
148aa112 3653 cfg_changed = false;
3654
65b0537f 3655 FOR_EACH_BB_FN (bb, fun)
f5c8cff5 3656 {
2f5a3c4a 3657 gimple_stmt_iterator gsi;
291d763b 3658
678b2f5b 3659 /* Apply forward propagation to all stmts in the basic-block.
3660 Note we update GSI within the loop as necessary. */
75a70cf9 3661 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
291d763b 3662 {
75a70cf9 3663 gimple stmt = gsi_stmt (gsi);
678b2f5b 3664 tree lhs, rhs;
3665 enum tree_code code;
291d763b 3666
678b2f5b 3667 if (!is_gimple_assign (stmt))
291d763b 3668 {
678b2f5b 3669 gsi_next (&gsi);
3670 continue;
3671 }
3a938499 3672
678b2f5b 3673 lhs = gimple_assign_lhs (stmt);
3674 rhs = gimple_assign_rhs1 (stmt);
3675 code = gimple_assign_rhs_code (stmt);
3676 if (TREE_CODE (lhs) != SSA_NAME
3677 || has_zero_uses (lhs))
3678 {
3679 gsi_next (&gsi);
3680 continue;
3681 }
3a938499 3682
678b2f5b 3683 /* If this statement sets an SSA_NAME to an address,
3684 try to propagate the address into the uses of the SSA_NAME. */
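	  /* E.g. (an invented sketch):
	       p_1 = &a;
	       *p_1 = 0;
	     becomes the direct store  a = 0;  and, once every use is
	     rewritten, the now-dead assignment to p_1 is removed below.  */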
3685 if (code == ADDR_EXPR
3686 /* Handle pointer conversions on invariant addresses
3687 as well, as this is valid gimple. */
3688 || (CONVERT_EXPR_CODE_P (code)
3689 && TREE_CODE (rhs) == ADDR_EXPR
3690 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3691 {
3692 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3693 if ((!base
3694 || !DECL_P (base)
3695 || decl_address_invariant_p (base))
3696 && !stmt_references_abnormal_ssa_name (stmt)
bfb89138 3697 && forward_propagate_addr_expr (lhs, rhs, true))
1c4607fd 3698 {
678b2f5b 3699 release_defs (stmt);
678b2f5b 3700 gsi_remove (&gsi, true);
1c4607fd 3701 }
678b2f5b 3702 else
3703 gsi_next (&gsi);
3704 }
cd22a796 3705 else if (code == POINTER_PLUS_EXPR)
678b2f5b 3706 {
cd22a796 3707 tree off = gimple_assign_rhs2 (stmt);
3708 if (TREE_CODE (off) == INTEGER_CST
3709 && can_propagate_from (stmt)
3710 && !simple_iv_increment_p (stmt)
678b2f5b 3711 /* ??? Better adjust the interface to that function
3712 instead of building new trees here. */
3713 && forward_propagate_addr_expr
cd22a796 3714 (lhs,
3715 build1_loc (gimple_location (stmt),
3716 ADDR_EXPR, TREE_TYPE (rhs),
3717 fold_build2 (MEM_REF,
3718 TREE_TYPE (TREE_TYPE (rhs)),
3719 rhs,
3720 fold_convert (ptr_type_node,
bfb89138 3721 off))), true))
ca3c9092 3722 {
678b2f5b 3723 release_defs (stmt);
678b2f5b 3724 gsi_remove (&gsi, true);
ca3c9092 3725 }
678b2f5b 3726 else if (is_gimple_min_invariant (rhs))
6afd0544 3727 {
678b2f5b 3728 /* Make sure to fold &a[0] + off_1 here. */
50aacf4c 3729 fold_stmt_inplace (&gsi);
678b2f5b 3730 update_stmt (stmt);
3731 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6afd0544 3732 gsi_next (&gsi);
3733 }
291d763b 3734 else
75a70cf9 3735 gsi_next (&gsi);
291d763b 3736 }
678b2f5b 3737 else if (TREE_CODE_CLASS (code) == tcc_comparison)
b5860aba 3738 {
e3a19533 3739 if (forward_propagate_comparison (&gsi))
65b0537f 3740 cfg_changed = true;
b5860aba 3741 }
291d763b 3742 else
75a70cf9 3743 gsi_next (&gsi);
291d763b 3744 }
678b2f5b 3745
3746 /* Combine stmts with the stmts defining their operands.
3747 Note we update GSI within the loop as necessary. */
a7107e58 3748 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
678b2f5b 3749 {
3750 gimple stmt = gsi_stmt (gsi);
3751 bool changed = false;
3752
2f5a3c4a 3753 /* Mark stmt as potentially needing revisiting. */
3754 gimple_set_plf (stmt, GF_PLF_1, false);
3755
678b2f5b 3756 switch (gimple_code (stmt))
3757 {
3758 case GIMPLE_ASSIGN:
3759 {
3760 tree rhs1 = gimple_assign_rhs1 (stmt);
3761 enum tree_code code = gimple_assign_rhs_code (stmt);
3762
3763 if ((code == BIT_NOT_EXPR
3764 || code == NEGATE_EXPR)
3765 && TREE_CODE (rhs1) == SSA_NAME)
3766 changed = simplify_not_neg_expr (&gsi);
360b78f3 3767 else if (code == COND_EXPR
3768 || code == VEC_COND_EXPR)
678b2f5b 3769 {
3770 /* In this case the entire COND_EXPR is in rhs1. */
11b881f5 3771 if (forward_propagate_into_cond (&gsi)
3772 || combine_cond_exprs (&gsi))
3773 {
3774 changed = true;
3775 stmt = gsi_stmt (gsi);
3776 }
678b2f5b 3777 }
3778 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3779 {
6f9714b3 3780 int did_something;
6f9714b3 3781 did_something = forward_propagate_into_comparison (&gsi);
3782 if (did_something == 2)
3783 cfg_changed = true;
6f9714b3 3784 changed = did_something != 0;
678b2f5b 3785 }
3b8827a2 3786 else if ((code == PLUS_EXPR
3787 || code == BIT_IOR_EXPR
3788 || code == BIT_XOR_EXPR)
3789 && simplify_rotate (&gsi))
3790 changed = true;
678b2f5b 3791 else if (code == BIT_AND_EXPR
3792 || code == BIT_IOR_EXPR
3793 || code == BIT_XOR_EXPR)
3794 changed = simplify_bitwise_binary (&gsi);
5a423a75 3795 else if (code == MULT_EXPR)
3796 {
3797 changed = simplify_mult (&gsi);
3798 if (changed
3799 && maybe_clean_or_replace_eh_stmt (stmt, stmt)
3800 && gimple_purge_dead_eh_edges (bb))
3801 cfg_changed = true;
3802 }
678b2f5b 3803 else if (code == PLUS_EXPR
3804 || code == MINUS_EXPR)
5a423a75 3805 {
3806 changed = associate_plusminus (&gsi);
3807 if (changed
3808 && maybe_clean_or_replace_eh_stmt (stmt, stmt)
3809 && gimple_purge_dead_eh_edges (bb))
3810 cfg_changed = true;
3811 }
c9c17332 3812 else if (code == POINTER_PLUS_EXPR)
3813 changed = associate_pointerplus (&gsi);
678b2f5b 3814 else if (CONVERT_EXPR_CODE_P (code)
3815 || code == FLOAT_EXPR
3816 || code == FIX_TRUNC_EXPR)
89c8f35a 3817 {
3818 int did_something = combine_conversions (&gsi);
3819 if (did_something == 2)
3820 cfg_changed = true;
d23e1965 3821
3822 /* If we have a narrowing conversion to an integral
3823 type that is fed by a BIT_AND_EXPR, we might be
3824 able to remove the BIT_AND_EXPR if it merely
3825 masks off bits outside the final type (and nothing
3826 else). */
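		  /* E.g. (names invented for illustration):
		       _1 = x_2 & 255;
		       y_3 = (unsigned char) _1;
		     the mask only clears bits the narrowing conversion
		     discards anyway, so this may become
		       y_3 = (unsigned char) x_2;  */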
3827 if (! did_something)
3828 {
3829 tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
3830 tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
4c0d6cf7 3831 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3832 && INTEGRAL_TYPE_P (outer_type)
d23e1965 3833 && INTEGRAL_TYPE_P (inner_type)
3834 && (TYPE_PRECISION (outer_type)
3835 <= TYPE_PRECISION (inner_type)))
3836 did_something = simplify_conversion_from_bitmask (&gsi);
3837 }
3838
89c8f35a 3839 changed = did_something != 0;
3840 }
496ec2ad 3841 else if (code == VEC_PERM_EXPR)
3842 {
3843 int did_something = simplify_permutation (&gsi);
3844 if (did_something == 2)
3845 cfg_changed = true;
3846 changed = did_something != 0;
3847 }
173c91d9 3848 else if (code == BIT_FIELD_REF)
3849 changed = simplify_bitfield_ref (&gsi);
6a9e13a2 3850 else if (code == CONSTRUCTOR
3851 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3852 changed = simplify_vector_constructor (&gsi);
678b2f5b 3853 break;
3854 }
3855
3856 case GIMPLE_SWITCH:
3857 changed = simplify_gimple_switch (stmt);
3858 break;
3859
3860 case GIMPLE_COND:
3861 {
3862 int did_something;
678b2f5b 3863 did_something = forward_propagate_into_gimple_cond (stmt);
3864 if (did_something == 2)
3865 cfg_changed = true;
678b2f5b 3866 changed = did_something != 0;
3867 break;
3868 }
3869
3870 case GIMPLE_CALL:
3871 {
3872 tree callee = gimple_call_fndecl (stmt);
3873 if (callee != NULL_TREE
3874 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3875 changed = simplify_builtin_call (&gsi, callee);
3876 break;
3877 }
3878
3879 default:;
3880 }
3881
a7107e58 3882 if (changed)
3883 {
3884 /* If the stmt changed then re-visit it and the statements
3885 inserted before it. */
2f5a3c4a 3886 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3887 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3888 break;
3889 if (gsi_end_p (gsi))
a7107e58 3890 gsi = gsi_start_bb (bb);
3891 else
2f5a3c4a 3892 gsi_next (&gsi);
a7107e58 3893 }
3894 else
3895 {
2f5a3c4a 3896 /* Stmt no longer needs to be revisited. */
3897 gimple_set_plf (stmt, GF_PLF_1, true);
a7107e58 3898 gsi_next (&gsi);
3899 }
678b2f5b 3900 }
f5c8cff5 3901 }
148aa112 3902
f619ecae 3903 /* At the end fold all statements. */
3904 cfg_changed |= fold_all_stmts (fun);
3905
148aa112 3906 if (cfg_changed)
6fa78c7b 3907 todoflags |= TODO_cleanup_cfg;
678b2f5b 3908
c96420f8 3909 return todoflags;
4ee9c684 3910}
3911
cbe8bda8 3912} // anon namespace
3913
3914gimple_opt_pass *
3915make_pass_forwprop (gcc::context *ctxt)
3916{
3917 return new pass_forwprop (ctxt);
3918}