/* gcc/tree-ssa-forwprop.c (excerpt).
   NOTE: this text was extracted from a git-blame web view; blame commit
   hashes and display line numbers were interleaved with the code.  */
291d763b 1/* Forward propagation of expressions for single use variables.
711789cc 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4ee9c684 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
4ee9c684 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
4ee9c684 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
4ee9c684 24#include "tree.h"
4ee9c684 25#include "tm_p.h"
26#include "basic-block.h"
e5b1e080 27#include "gimple-pretty-print.h"
073c1fd5 28#include "gimple.h"
a8783bee 29#include "gimplify.h"
dcf1a1ec 30#include "gimple-iterator.h"
e795d6e1 31#include "gimplify-me.h"
073c1fd5 32#include "gimple-ssa.h"
33#include "tree-cfg.h"
34#include "tree-phinodes.h"
35#include "ssa-iterators.h"
36#include "tree-ssanames.h"
37#include "tree-dfa.h"
4ee9c684 38#include "tree-pass.h"
291d763b 39#include "langhooks.h"
5adc1066 40#include "flags.h"
27f931ff 41#include "expr.h"
6b42039a 42#include "cfgloop.h"
d1938a4b 43#include "optabs.h"
58bf5219 44#include "tree-ssa-propagate.h"
424a4a92 45#include "tree-ssa-dom.h"
4ee9c684 46
291d763b 47/* This pass propagates the RHS of assignment statements into use
48 sites of the LHS of the assignment. It's basically a specialized
8f628ee8 49 form of tree combination. It is hoped all of this can disappear
50 when we have a generalized tree combiner.
4ee9c684 51
291d763b 52 One class of common cases we handle is forward propagating a single use
48e1416a 53 variable into a COND_EXPR.
4ee9c684 54
55 bb0:
56 x = a COND b;
57 if (x) goto ... else goto ...
58
59 Will be transformed into:
60
61 bb0:
62 if (a COND b) goto ... else goto ...
48e1416a 63
4ee9c684 64 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
65
66 Or (assuming c1 and c2 are constants):
67
68 bb0:
48e1416a 69 x = a + c1;
4ee9c684 70 if (x EQ/NEQ c2) goto ... else goto ...
71
72 Will be transformed into:
73
74 bb0:
75 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
76
77 Similarly for x = a - c1.
48e1416a 78
4ee9c684 79 Or
80
81 bb0:
82 x = !a
83 if (x) goto ... else goto ...
84
85 Will be transformed into:
86
87 bb0:
88 if (a == 0) goto ... else goto ...
89
90 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
91 For these cases, we propagate A into all, possibly more than one,
92 COND_EXPRs that use X.
93
f5c8cff5 94 Or
95
96 bb0:
97 x = (typecast) a
98 if (x) goto ... else goto ...
99
100 Will be transformed into:
101
102 bb0:
103 if (a != 0) goto ... else goto ...
104
105 (Assuming a is an integral type and x is a boolean or x is an
106 integral and a is a boolean.)
107
108 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
109 For these cases, we propagate A into all, possibly more than one,
110 COND_EXPRs that use X.
111
4ee9c684 112 In addition to eliminating the variable and the statement which assigns
113 a value to the variable, we may be able to later thread the jump without
e6dfde59 114 adding insane complexity in the dominator optimizer.
4ee9c684 115
f5c8cff5 116 Also note these transformations can cascade. We handle this by having
117 a worklist of COND_EXPR statements to examine. As we make a change to
118 a statement, we put it back on the worklist to examine on the next
119 iteration of the main loop.
120
291d763b 121 A second class of propagation opportunities arises for ADDR_EXPR
122 nodes.
123
124 ptr = &x->y->z;
125 res = *ptr;
126
127 Will get turned into
128
129 res = x->y->z;
130
50f39ec6 131 Or
132 ptr = (type1*)&type2var;
133 res = *ptr
134
135 Will get turned into (if type1 and type2 are the same size
136 and neither have volatile on them):
137 res = VIEW_CONVERT_EXPR<type1>(type2var)
138
291d763b 139 Or
140
141 ptr = &x[0];
142 ptr2 = ptr + <constant>;
143
144 Will get turned into
145
146 ptr2 = &x[constant/elementsize];
147
148 Or
149
150 ptr = &x[0];
151 offset = index * element_size;
152 offset_p = (pointer) offset;
153 ptr2 = ptr + offset_p
154
155 Will get turned into:
156
157 ptr2 = &x[index];
158
1c4607fd 159 Or
160 ssa = (int) decl
161 res = ssa & 1
162
163 Provided that decl has known alignment >= 2, will get turned into
164
165 res = 0
166
8f628ee8 167 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
168 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
169 {NOT_EXPR,NEG_EXPR}.
291d763b 170
4ee9c684 171 This will (of course) be extended as other needs arise. */
172
bfb89138 173static bool forward_propagate_addr_expr (tree, tree, bool);
148aa112 174
b59e1c90 175/* Set to true if we delete dead edges during the optimization. */
148aa112 176static bool cfg_changed;
177
75a70cf9 178static tree rhs_to_tree (tree type, gimple stmt);
148aa112 179
/* Get the next statement we can propagate NAME's value into, skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  FINAL_NAME_P
   if non-NULL is written to the ssa name that represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
	|| gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
211
/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    /* Once any link of the copy chain has multiple uses the whole
       chain is no longer a single-use chain.  */
    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}
e6dfde59 251
5adc1066 252/* Checks if the destination ssa name in DEF_STMT can be used as
253 propagation source. Returns true if so, otherwise false. */
e6dfde59 254
5adc1066 255static bool
75a70cf9 256can_propagate_from (gimple def_stmt)
5adc1066 257{
75a70cf9 258 gcc_assert (is_gimple_assign (def_stmt));
8f0b877f 259
484b827b 260 /* If the rhs has side-effects we cannot propagate from it. */
75a70cf9 261 if (gimple_has_volatile_ops (def_stmt))
484b827b 262 return false;
263
264 /* If the rhs is a load we cannot propagate from it. */
75a70cf9 265 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
266 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
484b827b 267 return false;
268
b9e98b8a 269 /* Constants can be always propagated. */
8f0b877f 270 if (gimple_assign_single_p (def_stmt)
271 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
b9e98b8a 272 return true;
273
75a70cf9 274 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
32cdcc42 275 if (stmt_references_abnormal_ssa_name (def_stmt))
276 return false;
4ee9c684 277
5adc1066 278 /* If the definition is a conversion of a pointer to a function type,
75a70cf9 279 then we can not apply optimizations as some targets require
280 function pointers to be canonicalized and in this case this
281 optimization could eliminate a necessary canonicalization. */
8f0b877f 282 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
75a70cf9 283 {
284 tree rhs = gimple_assign_rhs1 (def_stmt);
285 if (POINTER_TYPE_P (TREE_TYPE (rhs))
286 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
287 return false;
288 }
8f0b877f 289
5adc1066 290 return true;
e6dfde59 291}
292
ff0739e0 293/* Remove a chain of dead statements starting at the definition of
294 NAME. The chain is linked via the first operand of the defining statements.
5d2361b0 295 If NAME was replaced in its only use then this function can be used
ff0739e0 296 to clean up dead stmts. The function handles already released SSA
297 names gracefully.
298 Returns true if cleanup-cfg has to run. */
8f628ee8 299
5adc1066 300static bool
5d2361b0 301remove_prop_source_from_use (tree name)
5adc1066 302{
75a70cf9 303 gimple_stmt_iterator gsi;
304 gimple stmt;
5d2361b0 305 bool cfg_changed = false;
8f628ee8 306
5adc1066 307 do {
5d2361b0 308 basic_block bb;
309
ff0739e0 310 if (SSA_NAME_IN_FREE_LIST (name)
311 || SSA_NAME_IS_DEFAULT_DEF (name)
312 || !has_zero_uses (name))
5d2361b0 313 return cfg_changed;
8f628ee8 314
5adc1066 315 stmt = SSA_NAME_DEF_STMT (name);
ff0739e0 316 if (gimple_code (stmt) == GIMPLE_PHI
317 || gimple_has_side_effects (stmt))
6f9714b3 318 return cfg_changed;
ff0739e0 319
320 bb = gimple_bb (stmt);
6f9714b3 321 gsi = gsi_for_stmt (stmt);
ff0739e0 322 unlink_stmt_vdef (stmt);
13ff78a4 323 if (gsi_remove (&gsi, true))
324 cfg_changed |= gimple_purge_dead_eh_edges (bb);
ff0739e0 325 release_defs (stmt);
8f628ee8 326
ff0739e0 327 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
75a70cf9 328 } while (name && TREE_CODE (name) == SSA_NAME);
8f628ee8 329
5d2361b0 330 return cfg_changed;
5adc1066 331}
8f628ee8 332
75a70cf9 333/* Return the rhs of a gimple_assign STMT in a form of a single tree,
334 converted to type TYPE.
48e1416a 335
75a70cf9 336 This should disappear, but is needed so we can combine expressions and use
337 the fold() interfaces. Long term, we need to develop folding and combine
338 routines that deal with gimple exclusively . */
339
340static tree
341rhs_to_tree (tree type, gimple stmt)
342{
389dd41b 343 location_t loc = gimple_location (stmt);
75a70cf9 344 enum tree_code code = gimple_assign_rhs_code (stmt);
57c45d70 345 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
346 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
347 gimple_assign_rhs2 (stmt),
348 gimple_assign_rhs3 (stmt));
349 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
389dd41b 350 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
fb8ed03f 351 gimple_assign_rhs2 (stmt));
75a70cf9 352 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
fb8ed03f 353 return build1 (code, type, gimple_assign_rhs1 (stmt));
75a70cf9 354 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
355 return gimple_assign_rhs1 (stmt);
356 else
357 gcc_unreachable ();
358}
359
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  /* Overflow warnings are deferred until we know the simplified
     condition will actually be used; every exit path below must
     undefer exactly once.  */
  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Result is used: emit any deferred warnings unless warnings are
     suppressed for STMT.  */
  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
399
/* Combine the comparison OP0 CODE OP1 at the location of STMT with the
   defining statements of its operands.  Return a new comparison tree or
   NULL_TREE if there were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	  /* If the source is not single-use, require an invariant
	     result so we do not grow the IL (see INVARIANT_ONLY).  */
	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, !single_use0_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}
451
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  /* NOTE(review): this local shadows the file-scope 'cfg_changed'
     flag; consider renaming.  */
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      /* The replaced operands may now head dead single-use chains;
	 remove them and note whether that changed the CFG.  */
      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
486
5adc1066 487/* Propagate from the ssa name definition statements of COND_EXPR
75a70cf9 488 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
489 Returns zero if no statement was changed, one if there were
490 changes and two if cfg_cleanup needs to run.
48e1416a 491
75a70cf9 492 This must be kept in sync with forward_propagate_into_cond. */
493
494static int
495forward_propagate_into_gimple_cond (gimple stmt)
496{
678b2f5b 497 tree tmp;
498 enum tree_code code = gimple_cond_code (stmt);
6f9714b3 499 bool cfg_changed = false;
500 tree rhs1 = gimple_cond_lhs (stmt);
501 tree rhs2 = gimple_cond_rhs (stmt);
678b2f5b 502
503 /* We can do tree combining on SSA_NAME and comparison expressions. */
504 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
505 return 0;
506
c73fee76 507 tmp = forward_propagate_into_comparison_1 (stmt, code,
678b2f5b 508 boolean_type_node,
6f9714b3 509 rhs1, rhs2);
678b2f5b 510 if (tmp)
511 {
512 if (dump_file && tmp)
513 {
678b2f5b 514 fprintf (dump_file, " Replaced '");
6f9714b3 515 print_gimple_expr (dump_file, stmt, 0, 0);
678b2f5b 516 fprintf (dump_file, "' with '");
517 print_generic_expr (dump_file, tmp, 0);
518 fprintf (dump_file, "'\n");
519 }
75a70cf9 520
678b2f5b 521 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
522 update_stmt (stmt);
75a70cf9 523
6f9714b3 524 if (TREE_CODE (rhs1) == SSA_NAME)
525 cfg_changed |= remove_prop_source_from_use (rhs1);
526 if (TREE_CODE (rhs2) == SSA_NAME)
527 cfg_changed |= remove_prop_source_from_use (rhs2);
528 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
678b2f5b 529 }
75a70cf9 530
10a6edd6 531 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
532 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
533 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
534 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
535 && ((code == EQ_EXPR
536 && integer_zerop (rhs2))
537 || (code == NE_EXPR
538 && integer_onep (rhs2))))
539 {
540 basic_block bb = gimple_bb (stmt);
541 gimple_cond_set_code (stmt, NE_EXPR);
542 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
543 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
544 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
545 return 1;
546 }
547
6f9714b3 548 return 0;
75a70cf9 549}
550
551
552/* Propagate from the ssa name definition statements of COND_EXPR
553 in the rhs of statement STMT into the conditional if that simplifies it.
8a2caf10 554 Returns true zero if the stmt was changed. */
4ee9c684 555
8a2caf10 556static bool
75a70cf9 557forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
e6dfde59 558{
75a70cf9 559 gimple stmt = gsi_stmt (*gsi_p);
678b2f5b 560 tree tmp = NULL_TREE;
561 tree cond = gimple_assign_rhs1 (stmt);
def3cb70 562 enum tree_code code = gimple_assign_rhs_code (stmt);
10a6edd6 563 bool swap = false;
d080be9e 564
678b2f5b 565 /* We can do tree combining on SSA_NAME and comparison expressions. */
566 if (COMPARISON_CLASS_P (cond))
c73fee76 567 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
f2c1848b 568 TREE_TYPE (cond),
c8126d25 569 TREE_OPERAND (cond, 0),
570 TREE_OPERAND (cond, 1));
678b2f5b 571 else if (TREE_CODE (cond) == SSA_NAME)
572 {
def3cb70 573 enum tree_code def_code;
8a2caf10 574 tree name = cond;
678b2f5b 575 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
576 if (!def_stmt || !can_propagate_from (def_stmt))
6f9714b3 577 return 0;
5adc1066 578
def3cb70 579 def_code = gimple_assign_rhs_code (def_stmt);
580 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
8a2caf10 581 tmp = fold_build2_loc (gimple_location (def_stmt),
def3cb70 582 def_code,
bc112f18 583 TREE_TYPE (cond),
8a2caf10 584 gimple_assign_rhs1 (def_stmt),
585 gimple_assign_rhs2 (def_stmt));
def3cb70 586 else if (code == COND_EXPR
587 && ((def_code == BIT_NOT_EXPR
588 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
589 || (def_code == BIT_XOR_EXPR
590 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
10a6edd6 591 {
592 tmp = gimple_assign_rhs1 (def_stmt);
593 swap = true;
594 }
678b2f5b 595 }
5adc1066 596
25f48be0 597 if (tmp
598 && is_gimple_condexpr (tmp))
678b2f5b 599 {
600 if (dump_file && tmp)
601 {
602 fprintf (dump_file, " Replaced '");
603 print_generic_expr (dump_file, cond, 0);
604 fprintf (dump_file, "' with '");
605 print_generic_expr (dump_file, tmp, 0);
606 fprintf (dump_file, "'\n");
607 }
d080be9e 608
def3cb70 609 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
610 : integer_onep (tmp))
8a2caf10 611 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
612 else if (integer_zerop (tmp))
613 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
614 else
10a6edd6 615 {
616 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
617 if (swap)
618 {
619 tree t = gimple_assign_rhs2 (stmt);
620 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
621 gimple_assign_set_rhs3 (stmt, t);
622 }
623 }
678b2f5b 624 stmt = gsi_stmt (*gsi_p);
625 update_stmt (stmt);
5adc1066 626
8a2caf10 627 return true;
678b2f5b 628 }
d080be9e 629
6f9714b3 630 return 0;
4ee9c684 631}
632
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      /* If the true arm is itself a COND_EXPR with the same condition,
	 use its true arm: c ? (c ? a : b) : d  ==>  c ? a : d.  */
      if (is_gimple_assign (def_stmt)
	  && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
	  && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
	{
	  val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
	  gimple_assign_set_rhs2 (stmt, val1);
	  changed = true;
	}
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      /* Likewise for the false arm, taking the inner false arm:
	 c ? a : (c ? b : d)  ==>  c ? a : d.  */
      if (is_gimple_assign (def_stmt)
	  && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
	  && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
	{
	  val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
	  gimple_assign_set_rhs3 (stmt, val2);
	  changed = true;
	}
    }
  /* If both arms became equal the condition is dead; replace the whole
     COND_EXPR with the common value.  */
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
684
48e1416a 685/* We've just substituted an ADDR_EXPR into stmt. Update all the
148aa112 686 relevant data structures to match. */
687
688static void
75a70cf9 689tidy_after_forward_propagate_addr (gimple stmt)
148aa112 690{
148aa112 691 /* We may have turned a trapping insn into a non-trapping insn. */
692 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
75a70cf9 693 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
148aa112 694 cfg_changed = true;
f2fae51f 695
75a70cf9 696 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
697 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
148aa112 698}
699
15ec875c 700/* NAME is a SSA_NAME representing DEF_RHS which is of the form
701 ADDR_EXPR <whatever>.
291d763b 702
3d5cfe81 703 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
291d763b 704 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
3d5cfe81 705 node or for recovery of array indexing from pointer arithmetic.
75a70cf9 706
6b5a5c42 707 Return true if the propagation was successful (the propagation can
708 be not totally successful, yet things may have been changed). */
291d763b 709
710static bool
75a70cf9 711forward_propagate_addr_expr_1 (tree name, tree def_rhs,
712 gimple_stmt_iterator *use_stmt_gsi,
6776dec8 713 bool single_use_p)
291d763b 714{
75a70cf9 715 tree lhs, rhs, rhs2, array_ref;
75a70cf9 716 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
717 enum tree_code rhs_code;
9e019299 718 bool res = true;
291d763b 719
971c637a 720 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
291d763b 721
75a70cf9 722 lhs = gimple_assign_lhs (use_stmt);
723 rhs_code = gimple_assign_rhs_code (use_stmt);
724 rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 725
bfb89138 726 /* Do not perform copy-propagation but recurse through copy chains. */
727 if (TREE_CODE (lhs) == SSA_NAME
728 && rhs_code == SSA_NAME)
729 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
730
731 /* The use statement could be a conversion. Recurse to the uses of the
732 lhs as copyprop does not copy through pointer to integer to pointer
733 conversions and FRE does not catch all cases either.
734 Treat the case of a single-use name and
6776dec8 735 a conversion to def_rhs type separate, though. */
971c637a 736 if (TREE_CODE (lhs) == SSA_NAME
bfb89138 737 && CONVERT_EXPR_CODE_P (rhs_code))
6776dec8 738 {
bfb89138 739 /* If there is a point in a conversion chain where the types match
740 so we can remove a conversion re-materialize the address here
741 and stop. */
742 if (single_use_p
743 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
744 {
745 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
746 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
747 return true;
748 }
749
750 /* Else recurse if the conversion preserves the address value. */
751 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
752 || POINTER_TYPE_P (TREE_TYPE (lhs)))
753 && (TYPE_PRECISION (TREE_TYPE (lhs))
754 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
755 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
756
757 return false;
6776dec8 758 }
971c637a 759
bfb89138 760 /* If this isn't a conversion chain from this on we only can propagate
761 into compatible pointer contexts. */
762 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
763 return false;
764
182cf5a9 765 /* Propagate through constant pointer adjustments. */
766 if (TREE_CODE (lhs) == SSA_NAME
767 && rhs_code == POINTER_PLUS_EXPR
768 && rhs == name
769 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
770 {
771 tree new_def_rhs;
772 /* As we come here with non-invariant addresses in def_rhs we need
773 to make sure we can build a valid constant offsetted address
774 for further propagation. Simply rely on fold building that
775 and check after the fact. */
776 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
777 def_rhs,
778 fold_convert (ptr_type_node,
779 gimple_assign_rhs2 (use_stmt)));
780 if (TREE_CODE (new_def_rhs) == MEM_REF
f5d03f27 781 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
182cf5a9 782 return false;
783 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
784 TREE_TYPE (rhs));
785
786 /* Recurse. If we could propagate into all uses of lhs do not
787 bother to replace into the current use but just pretend we did. */
788 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
bfb89138 789 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
182cf5a9 790 return true;
791
792 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
793 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
794 new_def_rhs, NULL_TREE);
795 else if (is_gimple_min_invariant (new_def_rhs))
796 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
797 new_def_rhs, NULL_TREE);
798 else
799 return false;
800 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
801 update_stmt (use_stmt);
802 return true;
803 }
804
48e1416a 805 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
971c637a 806 ADDR_EXPR will not appear on the LHS. */
d0d1ecb8 807 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
808 while (handled_component_p (*lhsp))
809 lhsp = &TREE_OPERAND (*lhsp, 0);
810 lhs = *lhsp;
971c637a 811
182cf5a9 812 /* Now see if the LHS node is a MEM_REF using NAME. If so,
971c637a 813 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 814 if (TREE_CODE (lhs) == MEM_REF
9e019299 815 && TREE_OPERAND (lhs, 0) == name)
971c637a 816 {
182cf5a9 817 tree def_rhs_base;
818 HOST_WIDE_INT def_rhs_offset;
819 /* If the address is invariant we can always fold it. */
820 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
821 &def_rhs_offset)))
9e019299 822 {
5de9d3ed 823 offset_int off = mem_ref_offset (lhs);
182cf5a9 824 tree new_ptr;
e913b5cd 825 off += def_rhs_offset;
182cf5a9 826 if (TREE_CODE (def_rhs_base) == MEM_REF)
827 {
cf8f0e63 828 off += mem_ref_offset (def_rhs_base);
182cf5a9 829 new_ptr = TREE_OPERAND (def_rhs_base, 0);
830 }
831 else
832 new_ptr = build_fold_addr_expr (def_rhs_base);
833 TREE_OPERAND (lhs, 0) = new_ptr;
834 TREE_OPERAND (lhs, 1)
e913b5cd 835 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
9e019299 836 tidy_after_forward_propagate_addr (use_stmt);
9e019299 837 /* Continue propagating into the RHS if this was not the only use. */
838 if (single_use_p)
839 return true;
840 }
182cf5a9 841 /* If the LHS is a plain dereference and the value type is the same as
842 that of the pointed-to type of the address we can put the
843 dereferenced address on the LHS preserving the original alias-type. */
d0d1ecb8 844 else if (integer_zerop (TREE_OPERAND (lhs, 1))
845 && ((gimple_assign_lhs (use_stmt) == lhs
846 && useless_type_conversion_p
847 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
848 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
849 || types_compatible_p (TREE_TYPE (lhs),
850 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
f6e2e4ff 851 /* Don't forward anything into clobber stmts if it would result
852 in the lhs no longer being a MEM_REF. */
853 && (!gimple_clobber_p (use_stmt)
854 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
182cf5a9 855 {
856 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 857 tree new_offset, new_base, saved, new_lhs;
182cf5a9 858 while (handled_component_p (*def_rhs_basep))
859 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
860 saved = *def_rhs_basep;
861 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
862 {
863 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 864 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
865 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 866 }
867 else
868 {
869 new_base = build_fold_addr_expr (*def_rhs_basep);
870 new_offset = TREE_OPERAND (lhs, 1);
871 }
872 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
873 new_base, new_offset);
2e5dc41c 874 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 875 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
2e5dc41c 876 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
98d96c6f 877 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 878 *lhsp = new_lhs;
98d96c6f 879 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 880 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
182cf5a9 881 *def_rhs_basep = saved;
882 tidy_after_forward_propagate_addr (use_stmt);
883 /* Continue propagating into the RHS if this was not the
884 only use. */
885 if (single_use_p)
886 return true;
887 }
9e019299 888 else
889 /* We can have a struct assignment dereferencing our name twice.
890 Note that we didn't propagate into the lhs to not falsely
891 claim we did when propagating into the rhs. */
892 res = false;
971c637a 893 }
15ec875c 894
631d5db6 895 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
896 nodes from the RHS. */
d0d1ecb8 897 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
898 if (TREE_CODE (*rhsp) == ADDR_EXPR)
899 rhsp = &TREE_OPERAND (*rhsp, 0);
900 while (handled_component_p (*rhsp))
901 rhsp = &TREE_OPERAND (*rhsp, 0);
902 rhs = *rhsp;
291d763b 903
182cf5a9 904 /* Now see if the RHS node is a MEM_REF using NAME. If so,
291d763b 905 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 906 if (TREE_CODE (rhs) == MEM_REF
907 && TREE_OPERAND (rhs, 0) == name)
291d763b 908 {
182cf5a9 909 tree def_rhs_base;
910 HOST_WIDE_INT def_rhs_offset;
911 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
912 &def_rhs_offset)))
913 {
5de9d3ed 914 offset_int off = mem_ref_offset (rhs);
182cf5a9 915 tree new_ptr;
e913b5cd 916 off += def_rhs_offset;
182cf5a9 917 if (TREE_CODE (def_rhs_base) == MEM_REF)
918 {
cf8f0e63 919 off += mem_ref_offset (def_rhs_base);
182cf5a9 920 new_ptr = TREE_OPERAND (def_rhs_base, 0);
921 }
922 else
923 new_ptr = build_fold_addr_expr (def_rhs_base);
924 TREE_OPERAND (rhs, 0) = new_ptr;
925 TREE_OPERAND (rhs, 1)
e913b5cd 926 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
50aacf4c 927 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 928 tidy_after_forward_propagate_addr (use_stmt);
929 return res;
930 }
2e5dc41c 931 /* If the RHS is a plain dereference and the value type is the same as
182cf5a9 932 that of the pointed-to type of the address we can put the
2e5dc41c 933 dereferenced address on the RHS preserving the original alias-type. */
d0d1ecb8 934 else if (integer_zerop (TREE_OPERAND (rhs, 1))
935 && ((gimple_assign_rhs1 (use_stmt) == rhs
936 && useless_type_conversion_p
937 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
938 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
939 || types_compatible_p (TREE_TYPE (rhs),
940 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
182cf5a9 941 {
942 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 943 tree new_offset, new_base, saved, new_rhs;
182cf5a9 944 while (handled_component_p (*def_rhs_basep))
945 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
946 saved = *def_rhs_basep;
947 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
948 {
949 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 950 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
951 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 952 }
953 else
954 {
955 new_base = build_fold_addr_expr (*def_rhs_basep);
956 new_offset = TREE_OPERAND (rhs, 1);
957 }
958 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
959 new_base, new_offset);
2e5dc41c 960 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 961 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
2e5dc41c 962 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
98d96c6f 963 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 964 *rhsp = new_rhs;
98d96c6f 965 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 966 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
182cf5a9 967 *def_rhs_basep = saved;
50aacf4c 968 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 969 tidy_after_forward_propagate_addr (use_stmt);
970 return res;
971 }
291d763b 972 }
973
971c637a 974 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
975 is nothing to do. */
75a70cf9 976 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
977 || gimple_assign_rhs1 (use_stmt) != name)
971c637a 978 return false;
979
291d763b 980 /* The remaining cases are all for turning pointer arithmetic into
981 array indexing. They only apply when we have the address of
982 element zero in an array. If that is not the case then there
983 is nothing to do. */
15ec875c 984 array_ref = TREE_OPERAND (def_rhs, 0);
182cf5a9 985 if ((TREE_CODE (array_ref) != ARRAY_REF
986 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
987 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
988 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
291d763b 989 return false;
990
75a70cf9 991 rhs2 = gimple_assign_rhs2 (use_stmt);
704d7315 992 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
75a70cf9 993 if (TREE_CODE (rhs2) == INTEGER_CST)
291d763b 994 {
704d7315 995 tree new_rhs = build1_loc (gimple_location (use_stmt),
996 ADDR_EXPR, TREE_TYPE (def_rhs),
997 fold_build2 (MEM_REF,
998 TREE_TYPE (TREE_TYPE (def_rhs)),
999 unshare_expr (def_rhs),
1000 fold_convert (ptr_type_node,
1001 rhs2)));
1002 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1003 use_stmt = gsi_stmt (*use_stmt_gsi);
1004 update_stmt (use_stmt);
1005 tidy_after_forward_propagate_addr (use_stmt);
1006 return true;
291d763b 1007 }
1008
291d763b 1009 return false;
1010}
1011
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  /* Set to false as soon as one use cannot be propagated into.  */
  bool all = true;
  /* NAME only counts as single-use if it was single-use at every level
     of the recursion leading here.  */
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          /* Debug stmts are not real uses; failing to propagate into
             them must not block removal of NAME's definition.  */
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  /* Success requires both that each use was propagated into and that
     no use of NAME remains afterwards.  */
  return all && has_zero_uses (name);
}
1074
678b2f5b 1075
/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.
     get_prop_dest_stmt also updates NAME to the copy's result.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.
     Either ~cond (on a one-bit type) or cond ^ 1.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      /* The inverted comparison may not exist for floating-point
         comparisons when NaNs are honored.  */
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off it's current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
1158
1159
d23e1965 1160/* GSI_P points to a statement which performs a narrowing integral
1161 conversion.
1162
1163 Look for cases like:
1164
1165 t = x & c;
1166 y = (T) t;
1167
1168 Turn them into:
1169
1170 t = x & c;
1171 y = (T) x;
1172
1173 If T is narrower than X's type and C merely masks off bits outside
1174 of (T) and nothing else.
1175
1176 Normally we'd let DCE remove the dead statement. But no DCE runs
1177 after the last forwprop/combine pass, so we remove the obviously
1178 dead code ourselves.
1179
1180 Return TRUE if a change was made, FALSE otherwise. */
1181
1182static bool
1183simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
1184{
1185 gimple stmt = gsi_stmt (*gsi_p);
1186 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
1187
1188 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1189 the only use of the BIT_AND_EXPR result is the conversion. */
1190 if (is_gimple_assign (rhs_def_stmt)
1191 && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
1192 && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
1193 {
1194 tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
1195 tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
1196 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1197
1198 /* Now verify suitability of the BIT_AND_EXPR's operands.
1199 The first must be an SSA_NAME that we can propagate and the
1200 second must be an integer constant that masks out all the
1201 bits outside the final result's type, but nothing else. */
1202 if (TREE_CODE (rhs_def_operand1) == SSA_NAME
1203 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
1204 && TREE_CODE (rhs_def_operand2) == INTEGER_CST
1205 && operand_equal_p (rhs_def_operand2,
1206 build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
1207 TYPE_PRECISION (lhs_type)),
1208 0))
1209 {
1210 /* This is an optimizable case. Replace the source operand
1211 in the conversion with the first source operand of the
1212 BIT_AND_EXPR. */
1213 gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
1214 stmt = gsi_stmt (*gsi_p);
1215 update_stmt (stmt);
1216
1217 /* There is no DCE after the last forwprop pass. It's
1218 easy to clean up the first order effects here. */
1219 gimple_stmt_iterator si;
1220 si = gsi_for_stmt (rhs_def_stmt);
1221 gsi_remove (&si, true);
1222 release_defs (rhs_def_stmt);
1223 return true;
1224 }
1225 }
1226
1227 return false;
1228}
1229
1230
3a938499 1231/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1232 If so, we can change STMT into lhs = y which can later be copy
48e1416a 1233 propagated. Similarly for negation.
3a938499 1234
48e1416a 1235 This could trivially be formulated as a forward propagation
3a938499 1236 to immediate uses. However, we already had an implementation
1237 from DOM which used backward propagation via the use-def links.
1238
1239 It turns out that backward propagation is actually faster as
1240 there's less work to do for each NOT/NEG expression we find.
1241 Backwards propagation needs to look at the statement in a single
1242 backlink. Forward propagation needs to look at potentially more
678b2f5b 1243 than one forward link.
3a938499 1244
678b2f5b 1245 Returns true when the statement was changed. */
1246
1247static bool
75a70cf9 1248simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
3a938499 1249{
75a70cf9 1250 gimple stmt = gsi_stmt (*gsi_p);
1251 tree rhs = gimple_assign_rhs1 (stmt);
1252 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
3a938499 1253
1254 /* See if the RHS_DEF_STMT has the same form as our statement. */
75a70cf9 1255 if (is_gimple_assign (rhs_def_stmt)
1256 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
3a938499 1257 {
75a70cf9 1258 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
3a938499 1259
1260 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1261 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1262 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1263 {
75a70cf9 1264 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1265 stmt = gsi_stmt (*gsi_p);
3a938499 1266 update_stmt (stmt);
678b2f5b 1267 return true;
3a938499 1268 }
1269 }
678b2f5b 1270
1271 return false;
3a938499 1272}
3d5cfe81 1273
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  Label 0 is the default
     label and is not pushed.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      /* Store the surviving labels back, clear the leftover slots and
         shrink the label count (the +1 accounts for the default).  */
      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  First record every block
         still targeted by some label, then remove successor edges to
         any other block.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              /* Flag the CFG change for the pass driver; dominators
                 are stale after edge removal.  */
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}
1346
b5860aba 1347/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1348 the condition which we may be able to optimize better. */
1349
678b2f5b 1350static bool
75a70cf9 1351simplify_gimple_switch (gimple stmt)
b5860aba 1352{
75a70cf9 1353 tree cond = gimple_switch_index (stmt);
b5860aba 1354 tree def, to, ti;
75a70cf9 1355 gimple def_stmt;
b5860aba 1356
1357 /* The optimization that we really care about is removing unnecessary
1358 casts. That will let us do much better in propagating the inferred
1359 constant at the switch target. */
1360 if (TREE_CODE (cond) == SSA_NAME)
1361 {
75a70cf9 1362 def_stmt = SSA_NAME_DEF_STMT (cond);
1363 if (is_gimple_assign (def_stmt))
b5860aba 1364 {
75a70cf9 1365 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
b5860aba 1366 {
1367 int need_precision;
1368 bool fail;
1369
75a70cf9 1370 def = gimple_assign_rhs1 (def_stmt);
b5860aba 1371
b5860aba 1372 to = TREE_TYPE (cond);
1373 ti = TREE_TYPE (def);
1374
1375 /* If we have an extension that preserves value, then we
1376 can copy the source value into the switch. */
1377
1378 need_precision = TYPE_PRECISION (ti);
1379 fail = false;
c5237b8b 1380 if (! INTEGRAL_TYPE_P (ti))
1381 fail = true;
1382 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
b5860aba 1383 fail = true;
1384 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1385 need_precision += 1;
1386 if (TYPE_PRECISION (to) < need_precision)
1387 fail = true;
1388
1389 if (!fail)
1390 {
75a70cf9 1391 gimple_switch_set_index (stmt, def);
b59e1c90 1392 simplify_gimple_switch_label_vec (stmt, ti);
b5860aba 1393 update_stmt (stmt);
678b2f5b 1394 return true;
b5860aba 1395 }
1396 }
1397 }
1398 }
678b2f5b 1399
1400 return false;
b5860aba 1401}
1402
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  /* For each pointer, record up to CPD_ITERATIONS (base, offset)
     pairs seen while walking its definition chain.  */
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              /* Fold a constant component-reference offset into OFF.  */
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  /* &MEM[ssa + c]: keep walking from the SSA pointer,
                     accumulating the MEM_REF offset.  */
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  /* Reached a non-SSA base; record it and stop.  */
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          /* Record the current SSA name with its accumulated offset
             before following its definition.  */
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              /* Only constant pointer adjustments can be accumulated.  */
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  /* If both chains share a common base expression, the difference of
     their accumulated offsets is the pointer difference.  */
  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
1490
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  /* STMT1 is the store that reaches this call through the virtual
     use chain; we only handle the case where it immediately
     precedes STMT2 in that chain.  */
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          /* The memset value and length must be compile-time
             constants that fit a HOST_WIDE_INT.  */
          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              /* The copied range must lie entirely within the string
                 literal and the literal must be a plain char array.  */
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              /* For mempcpy, its result LHS1 points past the copied
                 bytes, so add LEN1 to the difference.  */
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
1699
41913fa9 1700/* Checks if expression has type of one-bit precision, or is a known
1701 truth-valued expression. */
1702static bool
1703truth_valued_ssa_name (tree name)
1704{
1705 gimple def;
1706 tree type = TREE_TYPE (name);
1707
1708 if (!INTEGRAL_TYPE_P (type))
1709 return false;
1710 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1711 necessarily one and so ~X is not equal to !X. */
1712 if (TYPE_PRECISION (type) == 1)
1713 return true;
1714 def = SSA_NAME_DEF_STMT (name);
1715 if (is_gimple_assign (def))
1716 return truth_value_p (gimple_assign_rhs_code (def));
1717 return false;
1718}
1719
1720/* Helper routine for simplify_bitwise_binary_1 function.
1721 Return for the SSA name NAME the expression X if it mets condition
1722 NAME = !X. Otherwise return NULL_TREE.
1723 Detected patterns for NAME = !X are:
1724 !X and X == 0 for X with integral type.
1725 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1726static tree
1727lookup_logical_inverted_value (tree name)
1728{
1729 tree op1, op2;
1730 enum tree_code code;
1731 gimple def;
1732
1733 /* If name has none-intergal type, or isn't a SSA_NAME, then
1734 return. */
1735 if (TREE_CODE (name) != SSA_NAME
1736 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1737 return NULL_TREE;
1738 def = SSA_NAME_DEF_STMT (name);
1739 if (!is_gimple_assign (def))
1740 return NULL_TREE;
1741
1742 code = gimple_assign_rhs_code (def);
1743 op1 = gimple_assign_rhs1 (def);
1744 op2 = NULL_TREE;
1745
1746 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
8f4a7578 1747 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
41913fa9 1748 if (code == EQ_EXPR || code == NE_EXPR
1749 || code == BIT_XOR_EXPR)
1750 op2 = gimple_assign_rhs2 (def);
1751
1752 switch (code)
1753 {
41913fa9 1754 case BIT_NOT_EXPR:
1755 if (truth_valued_ssa_name (name))
1756 return op1;
1757 break;
1758 case EQ_EXPR:
1759 /* Check if we have X == 0 and X has an integral type. */
1760 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1761 break;
1762 if (integer_zerop (op2))
1763 return op1;
1764 break;
1765 case NE_EXPR:
1766 /* Check if we have X != 1 and X is a truth-valued. */
1767 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1768 break;
1769 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1770 return op1;
1771 break;
1772 case BIT_XOR_EXPR:
1773 /* Check if we have X ^ 1 and X is truth valued. */
1774 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1775 return op1;
1776 break;
1777 default:
1778 break;
1779 }
1780
1781 return NULL_TREE;
1782}
1783
1784/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1785 operations CODE, if one operand has the logically inverted
1786 value of the other. */
1787static tree
1788simplify_bitwise_binary_1 (enum tree_code code, tree type,
1789 tree arg1, tree arg2)
1790{
1791 tree anot;
1792
1793 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1794 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1795 && code != BIT_XOR_EXPR)
1796 return NULL_TREE;
1797
1798 /* First check if operands ARG1 and ARG2 are equal. If so
1799 return NULL_TREE as this optimization is handled fold_stmt. */
1800 if (arg1 == arg2)
1801 return NULL_TREE;
1802 /* See if we have in arguments logical-not patterns. */
1803 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1804 || anot != arg2)
1805 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1806 || anot != arg1))
1807 return NULL_TREE;
1808
1809 /* X & !X -> 0. */
1810 if (code == BIT_AND_EXPR)
1811 return fold_convert (type, integer_zero_node);
1812 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1813 if (truth_valued_ssa_name (anot))
1814 return fold_convert (type, integer_one_node);
1815
1816 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1817 return NULL_TREE;
1818}
1819
10fbe63d 1820/* Given a ssa_name in NAME see if it was defined by an assignment and
1821 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1822 to the second operand on the rhs. */
1823
1824static inline void
1825defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1826{
1827 gimple def;
1828 enum tree_code code1;
1829 tree arg11;
1830 tree arg21;
1831 tree arg31;
1832 enum gimple_rhs_class grhs_class;
1833
1834 code1 = TREE_CODE (name);
1835 arg11 = name;
1836 arg21 = NULL_TREE;
1837 grhs_class = get_gimple_rhs_class (code1);
1838
1839 if (code1 == SSA_NAME)
1840 {
1841 def = SSA_NAME_DEF_STMT (name);
1842
1843 if (def && is_gimple_assign (def)
1844 && can_propagate_from (def))
1845 {
1846 code1 = gimple_assign_rhs_code (def);
1847 arg11 = gimple_assign_rhs1 (def);
1848 arg21 = gimple_assign_rhs2 (def);
1849 arg31 = gimple_assign_rhs2 (def);
1850 }
1851 }
1852 else if (grhs_class == GIMPLE_TERNARY_RHS
1853 || GIMPLE_BINARY_RHS
1854 || GIMPLE_UNARY_RHS
1855 || GIMPLE_SINGLE_RHS)
1856 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1857
1858 *code = code1;
1859 *arg1 = arg11;
1860 if (arg2)
1861 *arg2 = arg21;
1862 /* Ignore arg3 currently. */
1863}
1864
750e47f5 1865/* Return true if a conversion of an operand from type FROM to type TO
1866 should be applied after performing the operation instead. */
1867
1868static bool
1869hoist_conversion_for_bitop_p (tree to, tree from)
1870{
1871 /* That's a good idea if the conversion widens the operand, thus
1872 after hoisting the conversion the operation will be narrower. */
1873 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1874 return true;
1875
1876 /* It's also a good idea if the conversion is to a non-integer mode. */
1877 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1878 return true;
1879
1880 /* Or if the precision of TO is not the same as the precision
1881 of its mode. */
1882 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1883 return true;
1884
1885 return false;
1886}
1887
16bc66ec 1888/* GSI points to a statement of the form
1889
1890 result = OP0 CODE OP1
1891
1892 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1893 BIT_AND_EXPR or BIT_IOR_EXPR.
1894
1895 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1896 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1897 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1898
040f64e5 1899 If a simplification is made, return TRUE, else return FALSE. */
16bc66ec 1900static bool
1901simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1902 enum tree_code code,
1903 tree op0, tree op1)
1904{
1905 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1906
1907 if (!is_gimple_assign (op0_def_stmt)
1908 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1909 return false;
1910
1911 tree x = gimple_assign_rhs1 (op0_def_stmt);
1912 if (TREE_CODE (x) == SSA_NAME
1913 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1914 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1915 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1916 {
1917 enum tree_code newcode;
1918
1919 gimple stmt = gsi_stmt (*gsi);
1920 gimple_assign_set_rhs1 (stmt, x);
1921 gimple_assign_set_rhs2 (stmt, op1);
1922 if (code == BIT_AND_EXPR)
1923 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1924 else
1925 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1926 gimple_assign_set_rhs_code (stmt, newcode);
1927 update_stmt (stmt);
1928 return true;
1929 }
1930 return false;
1931
1932}
1933
/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.
   GSI points at the BIT_AND_EXPR/BIT_IOR_EXPR/BIT_XOR_EXPR assignment
   to simplify; the statement is modified in place (and possibly new
   statements inserted before it).  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
  enum tree_code def1_code, def2_code;

  /* Look through the defining statements of both operands.  */
  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);

  /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
     when profitable.  */
  if (TREE_CODE (arg2) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (def1_code)
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop =
        gimple_build_assign_with_ops (code, tem, def1_arg1,
				      fold_convert_loc (gimple_location (stmt),
							TREE_TYPE (def1_arg1),
							arg2));
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     to combine successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }


  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B.  */
  if (def1_code == def2_code
      && def1_code == BIT_AND_EXPR
      && operand_equal_for_phi_arg_p (def1_arg2,
				      def2_arg2))
    {
      tree b = def1_arg2;
      tree a = def1_arg1;
      tree c = def2_arg1;
      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C (this usually means C is the same as A) is 0
	 then fold it down correctly.  */
      if (integer_zerop (inner))
	{
	  gimple_assign_set_rhs_from_tree (gsi, inner);
	  update_stmt (stmt);
	  return true;
	}
      /* If A OP0 C (this usually means C is the same as A) is a ssa_name
	 then fold it down correctly.  */
      else if (TREE_CODE (inner) == SSA_NAME)
	{
	  tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
				    inner, b);
	  gimple_assign_set_rhs_from_tree (gsi, outer);
	  update_stmt (stmt);
	  return true;
	}
      else
	{
	  gimple newop;
	  tree tem;
	  tem = make_ssa_name (TREE_TYPE (arg2), NULL);
	  newop = gimple_build_assign_with_ops (code, tem, a, c);
	  gimple_set_location (newop, gimple_location (stmt));
	  /* Make sure to re-process the new stmt as it's walking upwards.  */
	  gsi_insert_before (gsi, newop, GSI_NEW_STMT);
	  gimple_assign_set_rhs1 (stmt, tem);
	  gimple_assign_set_rhs2 (stmt, b);
	  gimple_assign_set_rhs_code (stmt, def1_code);
	  update_stmt (stmt);
	  return true;
	}
    }

  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
			      arg2, def1_arg2);
      tree tem;
      gimple newop;
      /* CST1 & CST2 == 0 means the IOR contributed nothing under the
	 mask, so the result is simply a & CST2.  */
      if (integer_zerop (cst))
	{
	  gimple_assign_set_rhs1 (stmt, def1_arg1);
	  update_stmt (stmt);
	  return true;
	}
      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
					    tem, def1_arg1, arg2);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
			      arg2, def1_arg2);
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }

  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
    {
      /* OCODE is the dual operation of CODE.  */
      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
      if (def1_code == ocode)
	{
	  tree x = arg2;
	  enum tree_code coden;
	  tree a1, a2;
	  /* ( X | Y) & X -> X */
	  /* ( X & Y) | X -> X */
	  if (x == def1_arg1
	      || x == def1_arg2)
	    {
	      gimple_assign_set_rhs_from_tree (gsi, x);
	      update_stmt (gsi_stmt (*gsi));
	      return true;
	    }

	  defcodefor_name (def1_arg1, &coden, &a1, &a2);
	  /* (~X | Y) & X -> X & Y */
	  /* (~X & Y) | X -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def1_arg2);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	  defcodefor_name (def1_arg2, &coden, &a1, &a2);
	  /* (Y | ~X) & X -> X & Y */
	  /* (Y & ~X) | X -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def1_arg1);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	}
      if (def2_code == ocode)
	{
	  enum tree_code coden;
	  tree a1;
	  tree x = arg1;
	  /* X & ( X | Y) -> X */
	  /* X | ( X & Y) -> X */
	  if (x == def2_arg1
	      || x == def2_arg2)
	    {
	      gimple_assign_set_rhs_from_tree (gsi, x);
	      update_stmt (gsi_stmt (*gsi));
	      return true;
	    }
	  defcodefor_name (def2_arg1, &coden, &a1, NULL);
	  /* X & (~X | Y) -> X & Y */
	  /* X | (~X & Y) -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def2_arg2);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	  defcodefor_name (def2_arg2, &coden, &a1, NULL);
	  /* X & (Y | ~X) -> X & Y */
	  /* X | (Y & ~X) -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def2_arg1);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	}

      /* If arg1 and arg2 are booleans (or any single bit type)
         then try to simplify:

	   (~X & Y) -> X < Y
	   (X & ~Y) -> Y < X
	   (~X | Y) -> X <= Y
	   (X | ~Y) -> Y <= X

	 But only do this if our result feeds into a comparison as
	 this transformation is not always a win, particularly on
	 targets with and-not instructions.  */
      if (TREE_CODE (arg1) == SSA_NAME
	  && TREE_CODE (arg2) == SSA_NAME
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
	  && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
	  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
	      == TYPE_UNSIGNED (TREE_TYPE (arg2))))
	{
	  use_operand_p use_p;
	  gimple use_stmt;

	  /* Only apply when the single use is a GIMPLE_COND of the
	     form 'if (result != 0)'.  */
	  if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
	    {
	      if (gimple_code (use_stmt) == GIMPLE_COND
		  && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
		  && integer_zerop (gimple_cond_rhs (use_stmt))
		  && gimple_cond_code (use_stmt) == NE_EXPR)
		{
		  if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
		    return true;
		  if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
		    return true;
		}
	    }
	}
    }
  return false;
}
2221
ca3c9092 2222
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
	 The C/C++ FE cast any shift count argument to integer_type_node.
	 The only problem might be if the shift count type maximum value
	 is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && tree_fits_shwi_p (cdef_arg1[i])
	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe for Y being 0,
	   because then one of the shifts triggers undefined behavior.
	   This alternative is safe even for rotation count of 0.
	   One shift count is Y and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && tree_fits_shwi_p (cdef_arg2[i])
		 && tree_to_shwi (cdef_arg2[i])
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		defcodefor_name (tem, &code, &tem, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		    && (tem == def_arg2[1 - i]
			|| tem == def_arg2_alt[1 - i]))
		  {
		    rotcnt = tem;
		    break;
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      /* If the match was found on operand 0, the rotate direction
	 below has to be inverted.  */
      swapped_p = i != 1;
    }

  /* Emit the rotate count conversion if needed, then the rotate
     statement itself, and finally a narrowing conversion back to
     RTYPE when the shifts were performed in a wider type.  */
  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign_with_ops (NOP_EXPR,
					make_ssa_name (TREE_TYPE (def_arg2[0]),
						       NULL),
					rotcnt, NULL_TREE);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
  g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
				    ? LROTATE_EXPR : RROTATE_EXPR,
				    lhs, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
					lhs, NULL_TREE);
    }
  gsi_replace (gsi, g, false);
  return true;
}
2459
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed (i.e. dead EH
   edges were purged after the statement was simplified).  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  Iterate as each contraction can expose
     another negate in an operand.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A +- CST) +- CST  ->  A +- CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */

  /* Look through the definition of RHS1 first.  */
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (CONSTANT_CLASS_P (rhs2)
		       && CONSTANT_CLASS_P (def_rhs1))
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (CONSTANT_CLASS_P (rhs2)
		       && CONSTANT_CLASS_P (def_rhs2))
		{
		  /* (A +- CST) +- CST -> A +- CST.  */
		  enum tree_code mix = (code == def_code)
				       ? PLUS_EXPR : MINUS_EXPR;
		  tree cst = fold_binary (mix, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
		  rhs2 = NULL_TREE;
		  code = TREE_CODE (rhs1);
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
			&& integer_onep (rhs2))
		       || (TREE_CODE (rhs2) == COMPLEX_CST
			   && integer_onep (TREE_REALPART (rhs2))
			   && integer_onep (TREE_IMAGPART (rhs2))))
		{
		  /* ~A + 1 -> -A.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

  /* Then look through the definition of RHS2 (which may have become
     NULL_TREE above if STMT was turned into a unary operation).  */
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (CONSTANT_CLASS_P (rhs1)
		       && CONSTANT_CLASS_P (def_rhs1))
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (CONSTANT_CLASS_P (rhs1)
		       && CONSTANT_CLASS_P (def_rhs2))
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2),
					  rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
		  rhs2 = NULL_TREE;
		  code = TREE_CODE (rhs1);
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      /* Simplification may have removed a potentially trapping
	 operation; purging dead EH edges then changes the CFG.  */
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	return true;
    }

  return false;
}
2746
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler and easier to analyze with respect to alignment
       ... = ptr & ~algn;  */
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  /* Step 1: the offset must be a negation.  */
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  /* Step 2: the negated value must be a masking with a constant.  */
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;
  /* Step 3: the masked value must be a conversion of PTR itself.  */
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  /* Replace the whole sequence by PTR & ~ALGN.  */
  algn = wide_int_to_tree (TREE_TYPE (ptr), wi::bit_not (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
2798
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  /* A conversion to a type the operand already has can be turned into
     a plain copy.  */
  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      /* Cached properties of the innermost, intermediate and final
	 types of the conversion chain.  */
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
	return 0;

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
	   || (inter_float && inside_float)
	   || (inter_vec && inside_vec))
	  && inter_prec >= inside_prec
	  && (inter_float || inter_vec
	      || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type))
	  && ! final_ptr
	  && (! final_vec || inter_prec == inside_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  Likewise if the
	 final conversion does not change precision we can drop the
	 intermediate conversion.  */
      if (inside_int && inter_int && final_int
	  && ((inside_prec < inter_prec && inter_prec < final_prec
	       && inside_unsignedp && !inter_unsignedp)
	      || final_prec == inter_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     wide_int_to_tree
			       (inside_type,
				wi::mask (inter_prec, false,
					  TYPE_PRECISION (inside_type))));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      /* The mask is computed in the inside type; re-gimplify it
		 and keep the outer conversion in place.  */
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return 1;
	}

      /* If we are converting an integer to a floating-point that can
	 represent it exactly and back to an integer, we can skip the
	 floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
	  (unsigned) significand_size (TYPE_MODE (inter_type))
	  >= inside_prec - !inside_unsignedp)
	{
	  if (useless_type_conversion_p (type, inside_type))
	    {
	      gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	      gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	  else
	    {
	      gimple_assign_set_rhs1 (stmt, defop0);
	      gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	}
    }

  return 0;
}
2995
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  /* Only handle BIT_FIELD_REFs of an SSA name of vector type.  */
  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  /* op1 is the reference size, op2 the bit offset.  */
  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      /* Fold the access to the constructed vector directly.  */
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  /* Require extraction of exactly one element at an element-aligned
     position.  */
  size = tree_to_hwi (TYPE_SIZE (elem_type));
  n = tree_to_hwi (op1) / size;
  if (n != 1)
    return false;
  idx = tree_to_hwi (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      /* Look through the permutation: map IDX via the selector to the
	 corresponding element of one of the two permute inputs.  */
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
	return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = tree_to_hwi (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
	{
	  p = gimple_assign_rhs1 (def_stmt);
	}
      else
	{
	  /* Selector values >= nelts pick from the second input.  */
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
3076
496ec2ad 3077/* Determine whether applying the 2 permutations (mask1 then mask2)
3078 gives back one of the input. */
3079
3080static int
3081is_combined_permutation_identity (tree mask1, tree mask2)
3082{
3083 tree mask;
3084 unsigned int nelts, i, j;
3085 bool maybe_identity1 = true;
3086 bool maybe_identity2 = true;
3087
3088 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3089 && TREE_CODE (mask2) == VECTOR_CST);
3090 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3091 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3092
3093 nelts = VECTOR_CST_NELTS (mask);
3094 for (i = 0; i < nelts; i++)
3095 {
3096 tree val = VECTOR_CST_ELT (mask, i);
3097 gcc_assert (TREE_CODE (val) == INTEGER_CST);
e913b5cd 3098 j = tree_to_hwi (val) & (2 * nelts - 1);
496ec2ad 3099 if (j == i)
3100 maybe_identity2 = false;
3101 else if (j == i + nelts)
3102 maybe_identity1 = false;
3103 else
3104 return 0;
3105 }
3106 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3107}
3108
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  /* Only a constant selector can be analyzed.  */
  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  /* Classify the first input: either a constant vector, or an SSA name
     whose defining statement we can propagate from.  */
  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      /* If the combined permutation is an identity, replace the outer
	 shuffle by the surviving input of the inner one.  */
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  /* Don't substitute if op0 has other uses.  */
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      /* Fold the shuffle of the two constants/constructors into a
	 single constant or constructor.  */
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
3220
/* Recognize a VEC_PERM_EXPR.  Turns a vector CONSTRUCTOR whose elements
   are all single-element BIT_FIELD_REFs of one source vector into either
   a plain copy (identity selection) or a VEC_PERM_EXPR with a constant
   mask.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = tree_to_hwi (TYPE_SIZE (elem_type));

  /* Collect in SEL the source-vector index each constructed element is
     pulled from; ORIG is the common source vector.  */
  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
	{
	  /* All elements must come from the same source vector.  */
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
	    return false;
	  orig = ref;
	}
      /* Each reference must extract exactly one element.  */
      if (tree_to_hwi (TREE_OPERAND (op1, 1)) != elem_size)
	return false;
      sel[i] = tree_to_hwi (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  /* The constructor must fill the whole vector.  */
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      /* The target must support the permutation.  */
      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
	return false;
      /* Build the constant selector vector from SEL.  */
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
3310
/* Main entry point for the forward propagation and statement combine
   optimizer.  Runs two sub-passes over each basic block: first a
   forward-propagation walk, then a statement-combining walk that may
   re-visit statements it changed.  Returns the TODO flags for the pass
   manager.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  /* All uses were replaced; the definition is dead.  */
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  /* Only advance if folding did not change the rhs code;
		     otherwise re-visit the statement.  */
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      if (forward_propagate_comparison (&gsi))
		cfg_changed = true;
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Dispatch on the statement kind to the matching simplifier.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if ((code == BIT_NOT_EXPR
		     || code == NEGATE_EXPR)
		    && TREE_CODE (rhs1) == SSA_NAME)
		  changed = simplify_not_neg_expr (&gsi);
		else if (code == COND_EXPR
			 || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1. */
		    if (forward_propagate_into_cond (&gsi)
			|| combine_cond_exprs (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == BIT_AND_EXPR
			 || code == BIT_IOR_EXPR
			 || code == BIT_XOR_EXPR)
		  changed = simplify_bitwise_binary (&gsi);
		else if (code == PLUS_EXPR
			 || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		else if (code == POINTER_PLUS_EXPR)
		  changed = associate_pointerplus (&gsi);
		else if (CONVERT_EXPR_CODE_P (code)
			 || code == FLOAT_EXPR
			 || code == FIX_TRUNC_EXPR)
		  {
		    int did_something = combine_conversions (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;

		    /* If we have a narrowing conversion to an integral
		       type that is fed by a BIT_AND_EXPR, we might be
		       able to remove the BIT_AND_EXPR if it merely
		       masks off bits outside the final type (and nothing
		       else).  */
		    if (! did_something)
		      {
			tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
			tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
			if (INTEGRAL_TYPE_P (outer_type)
			    && INTEGRAL_TYPE_P (inner_type)
			    && (TYPE_PRECISION (outer_type)
				<= TYPE_PRECISION (inner_type)))
			  did_something = simplify_conversion_from_bitmask (&gsi);
		      }

		    changed = did_something != 0;
		  }
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (stmt);
	      break;

	    case GIMPLE_COND:
	      {
		int did_something;
		did_something = forward_propagate_into_gimple_cond (stmt);
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  Walk back to the first statement not
		 yet marked as visited (GF_PLF_1 set).  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);
	      gsi_next (&gsi);
	    }
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
3560
3561
3562static bool
3563gate_forwprop (void)
3564{
408c3c77 3565 return flag_tree_forwprop;
4ee9c684 3566}
3567
namespace {

/* Static pass descriptor for the forwprop pass.  */

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

/* Pass wrapper; clonable so the pass can be instantiated more than
   once in the pass pipeline.  */

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace

/* Factory used by the pass manager to create the forwprop pass.  */

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}