gcc/tree-ssa-forwprop.c
291d763b 1/* Forward propagation of expressions for single use variables.
711789cc 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4ee9c684 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
4ee9c684 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
4ee9c684 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
4ee9c684 24#include "tree.h"
4ee9c684 25#include "tm_p.h"
26#include "basic-block.h"
e5b1e080 27#include "gimple-pretty-print.h"
69ee5dbb 28#include "tree-ssa.h"
4ee9c684 29#include "tree-pass.h"
291d763b 30#include "langhooks.h"
5adc1066 31#include "flags.h"
75a70cf9 32#include "gimple.h"
27f931ff 33#include "expr.h"
6b42039a 34#include "cfgloop.h"
d1938a4b 35#include "optabs.h"
58bf5219 36#include "tree-ssa-propagate.h"
4ee9c684 37
291d763b 38/* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
8f628ee8 40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
4ee9c684 42
291d763b 43 One class of common cases we handle is forward propagating a single use
48e1416a 44 variable into a COND_EXPR.
4ee9c684 45
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
49
50 Will be transformed into:
51
52 bb0:
53 if (a COND b) goto ... else goto ...
48e1416a 54
4ee9c684 55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56
57 Or (assuming c1 and c2 are constants):
58
59 bb0:
48e1416a 60 x = a + c1;
4ee9c684 61 if (x EQ/NEQ c2) goto ... else goto ...
62
63 Will be transformed into:
64
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67
68 Similarly for x = a - c1.
48e1416a 69
4ee9c684 70 Or
71
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
75
76 Will be transformed into:
77
78 bb0:
79 if (a == 0) goto ... else goto ...
80
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
84
f5c8cff5 85 Or
86
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
90
91 Will be transformed into:
92
93 bb0:
94 if (a != 0) goto ... else goto ...
95
 96 (Assuming a is of integral type and x is a boolean, or x is of
 97 integral type and a is a boolean.)
98
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
102
4ee9c684 103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
e6dfde59 105 adding insane complexity in the dominator optimizer.
4ee9c684 106
f5c8cff5 107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
111
291d763b 112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
114
115 ptr = &x->y->z;
116 res = *ptr;
117
118 Will get turned into
119
120 res = x->y->z;
121
50f39ec6 122 Or
123 ptr = (type1*)&type2var;
124 res = *ptr
125
126 Will get turned into (if type1 and type2 are the same size
 127 and neither has volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
129
291d763b 130 Or
131
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
134
135 Will get turned into
136
137 ptr2 = &x[constant/elementsize];
138
139 Or
140
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
145
146 Will get turned into:
147
148 ptr2 = &x[index];
149
1c4607fd 150 Or
151 ssa = (int) decl
152 res = ssa & 1
153
154 Provided that decl has known alignment >= 2, will get turned into
155
156 res = 0
157
8f628ee8 158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions so
 159 that the cast can be removed, and we propagate a {NOT_EXPR,NEG_EXPR}
 160 into a subsequent {NOT_EXPR,NEG_EXPR} so that the pair cancels.
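
   For example (an illustrative sketch; the names are made up):

     x = (int) c
     switch (x) ...

   can become

     switch (c) ...

   when the conversion preserves all case values, and

     x = ~a
     y = ~x

   has its second statement rewritten into y = a, which can then be
   copy propagated.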
291d763b 161
4ee9c684 162 This will (of course) be extended as other needs arise. */
163
15ec875c 164static bool forward_propagate_addr_expr (tree name, tree rhs);
148aa112 165
b59e1c90 166/* Set to true if we delete dead edges during the optimization. */
148aa112 167static bool cfg_changed;
168
75a70cf9 169static tree rhs_to_tree (tree type, gimple stmt);
148aa112 170
83a20baf 171/* Get the next statement we can propagate NAME's value into, skipping
5adc1066 172 trivial copies. Returns the statement that is suitable as a
 173 propagation destination, or NULL if there is none.
 174 This only returns destinations in a single-use chain. If FINAL_NAME_P
 175 is non-NULL, it is set to the ssa name that represents the use. */
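
/* For instance (an illustrative sketch; the SSA names are made up):
     tem_1 = x_2;
     tem_3 = tem_1;
     if (tem_3 != 0) ...
   get_prop_dest_stmt (x_2, &final) skips the two single-use copies,
   returns the GIMPLE_COND and sets final to tem_3.  */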
a3451973 176
75a70cf9 177static gimple
5adc1066 178get_prop_dest_stmt (tree name, tree *final_name_p)
a3451973 179{
5adc1066 180 use_operand_p use;
75a70cf9 181 gimple use_stmt;
a3451973 182
5adc1066 183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
75a70cf9 186 return NULL;
a3451973 187
5adc1066 188 /* If this is not a trivial copy, we found it. */
8f0b877f 189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
75a70cf9 190 || gimple_assign_rhs1 (use_stmt) != name)
5adc1066 191 break;
192
193 /* Continue searching uses of the copy destination. */
75a70cf9 194 name = gimple_assign_lhs (use_stmt);
5adc1066 195 } while (1);
196
197 if (final_name_p)
198 *final_name_p = name;
199
200 return use_stmt;
a3451973 201}
202
5adc1066 203/* Get the statement we can propagate from into NAME, skipping
 204 trivial copies. Returns the statement which defines the
 205 propagation source, or NULL if there is none.
 206 If SINGLE_USE_ONLY is set, consider only sources which have
 207 a single-use chain up to NAME. If SINGLE_USE_P is non-null,
 208 it is set to whether the chain to NAME is a single-use chain
 209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
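
/* For instance (illustrative; the SSA names are made up), given
     a_1 = b_2 + 1;
     c_3 = a_1;
   get_prop_source_stmt (c_3, false, &single) skips the trivial copy and
   returns the statement defining a_1.  */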
4ee9c684 210
75a70cf9 211static gimple
5adc1066 212get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
f5c8cff5 213{
5adc1066 214 bool single_use = true;
215
216 do {
75a70cf9 217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
5adc1066 218
219 if (!has_single_use (name))
220 {
221 single_use = false;
222 if (single_use_only)
75a70cf9 223 return NULL;
5adc1066 224 }
225
226 /* If name is defined by a PHI node or is the default def, bail out. */
8f0b877f 227 if (!is_gimple_assign (def_stmt))
75a70cf9 228 return NULL;
5adc1066 229
ab31ca23 230 /* If def_stmt is a simple copy, continue looking. */
231 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
232 name = gimple_assign_rhs1 (def_stmt);
233 else
5adc1066 234 {
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
237
ab31ca23 238 return def_stmt;
5adc1066 239 }
5adc1066 240 } while (1);
241}
e6dfde59 242
5adc1066 243/* Checks if the destination ssa name in DEF_STMT can be used as
244 propagation source. Returns true if so, otherwise false. */
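
/* For example (illustrative), propagation is refused for a load such as
     tmp_1 = *p_2;
   or for a conversion from a function pointer such as
     addr_1 = (void *) fnptr_2;
   but a definition like
     tmp_3 = a_4 + 1;
   is fine.  */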
e6dfde59 245
5adc1066 246static bool
75a70cf9 247can_propagate_from (gimple def_stmt)
5adc1066 248{
75a70cf9 249 gcc_assert (is_gimple_assign (def_stmt));
8f0b877f 250
484b827b 251 /* If the rhs has side-effects we cannot propagate from it. */
75a70cf9 252 if (gimple_has_volatile_ops (def_stmt))
484b827b 253 return false;
254
255 /* If the rhs is a load we cannot propagate from it. */
75a70cf9 256 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
257 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
484b827b 258 return false;
259
b9e98b8a 260 /* Constants can be always propagated. */
8f0b877f 261 if (gimple_assign_single_p (def_stmt)
262 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
b9e98b8a 263 return true;
264
75a70cf9 265 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
32cdcc42 266 if (stmt_references_abnormal_ssa_name (def_stmt))
267 return false;
4ee9c684 268
5adc1066 269 /* If the definition is a conversion of a pointer to a function type,
75a70cf9 270 then we can not apply optimizations as some targets require
271 function pointers to be canonicalized and in this case this
272 optimization could eliminate a necessary canonicalization. */
8f0b877f 273 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
75a70cf9 274 {
275 tree rhs = gimple_assign_rhs1 (def_stmt);
276 if (POINTER_TYPE_P (TREE_TYPE (rhs))
277 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
278 return false;
279 }
8f0b877f 280
5adc1066 281 return true;
e6dfde59 282}
283
ff0739e0 284/* Remove a chain of dead statements starting at the definition of
285 NAME. The chain is linked via the first operand of the defining statements.
5d2361b0 286 If NAME was replaced in its only use then this function can be used
ff0739e0 287 to clean up dead stmts. The function handles already released SSA
288 names gracefully.
289 Returns true if cleanup-cfg has to run. */
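
/* For example (illustrative), once the single use of tem_2 in
     tem_2 = a_1 < b_3;
     if (tem_2 != 0) ...
   has been replaced by if (a_1 < b_3), calling this function on tem_2
   removes the now dead comparison statement.  */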
8f628ee8 290
5adc1066 291static bool
5d2361b0 292remove_prop_source_from_use (tree name)
5adc1066 293{
75a70cf9 294 gimple_stmt_iterator gsi;
295 gimple stmt;
5d2361b0 296 bool cfg_changed = false;
8f628ee8 297
5adc1066 298 do {
5d2361b0 299 basic_block bb;
300
ff0739e0 301 if (SSA_NAME_IN_FREE_LIST (name)
302 || SSA_NAME_IS_DEFAULT_DEF (name)
303 || !has_zero_uses (name))
5d2361b0 304 return cfg_changed;
8f628ee8 305
5adc1066 306 stmt = SSA_NAME_DEF_STMT (name);
ff0739e0 307 if (gimple_code (stmt) == GIMPLE_PHI
308 || gimple_has_side_effects (stmt))
6f9714b3 309 return cfg_changed;
ff0739e0 310
311 bb = gimple_bb (stmt);
6f9714b3 312 gsi = gsi_for_stmt (stmt);
ff0739e0 313 unlink_stmt_vdef (stmt);
13ff78a4 314 if (gsi_remove (&gsi, true))
315 cfg_changed |= gimple_purge_dead_eh_edges (bb);
ff0739e0 316 release_defs (stmt);
8f628ee8 317
ff0739e0 318 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
75a70cf9 319 } while (name && TREE_CODE (name) == SSA_NAME);
8f628ee8 320
5d2361b0 321 return cfg_changed;
5adc1066 322}
8f628ee8 323
75a70cf9 324/* Return the rhs of a gimple_assign STMT in a form of a single tree,
325 converted to type TYPE.
48e1416a 326
75a70cf9 327 This should disappear, but is needed so we can combine expressions and use
328 the fold() interfaces. Long term, we need to develop folding and combine
 329 routines that deal with gimple exclusively. */
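
/* For instance (illustrative), for the gimple assignment
     a_1 = b_2 + c_3;
   rhs_to_tree (type, stmt) hands back a GENERIC tree for b_2 + c_3 built
   with the requested type, ready for the fold() interfaces.  */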
330
331static tree
332rhs_to_tree (tree type, gimple stmt)
333{
389dd41b 334 location_t loc = gimple_location (stmt);
75a70cf9 335 enum tree_code code = gimple_assign_rhs_code (stmt);
57c45d70 336 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
337 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
338 gimple_assign_rhs2 (stmt),
339 gimple_assign_rhs3 (stmt));
340 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
389dd41b 341 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
fb8ed03f 342 gimple_assign_rhs2 (stmt));
75a70cf9 343 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
fb8ed03f 344 return build1 (code, type, gimple_assign_rhs1 (stmt));
75a70cf9 345 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
346 return gimple_assign_rhs1 (stmt);
347 else
348 gcc_unreachable ();
349}
350
5adc1066 351/* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
352 the folded result in a form suitable for COND_EXPR_COND or
353 NULL_TREE, if there is no suitable simplified form. If
354 INVARIANT_ONLY is true only gimple_min_invariant results are
355 considered simplified. */
8f628ee8 356
357static tree
c73fee76 358combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
5adc1066 359 tree op0, tree op1, bool invariant_only)
8f628ee8 360{
5adc1066 361 tree t;
8f628ee8 362
5adc1066 363 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
8f628ee8 364
c73fee76 365 fold_defer_overflow_warnings ();
366 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
5adc1066 367 if (!t)
c73fee76 368 {
369 fold_undefer_overflow_warnings (false, NULL, 0);
370 return NULL_TREE;
371 }
8f628ee8 372
5adc1066 373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
8f628ee8 375
a7392604 376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
8f628ee8 378
5adc1066 379 /* Bail out if we required an invariant but didn't get one. */
75a70cf9 380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
c73fee76 381 {
382 fold_undefer_overflow_warnings (false, NULL, 0);
383 return NULL_TREE;
384 }
385
386 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
8f628ee8 387
a7392604 388 return t;
8f628ee8 389}
390
c8126d25 391/* Combine the comparison OP0 CODE OP1 with the defining statements
 392 of its operands, using the location of STMT for folding. Return a new
 393 comparison tree or NULL_TREE if there were no simplifying combines. */
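
/* For example (illustrative), for the comparison a_1 != 0 with
     a_1 = b_2 + 1;
   the definition of the first operand is substituted and folding may
   produce the simpler comparison b_2 != -1.  */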
394
395static tree
c73fee76 396forward_propagate_into_comparison_1 (gimple stmt,
678b2f5b 397 enum tree_code code, tree type,
398 tree op0, tree op1)
c8126d25 399{
400 tree tmp = NULL_TREE;
401 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
402 bool single_use0_p = false, single_use1_p = false;
403
404 /* For comparisons use the first operand, that is likely to
405 simplify comparisons against constants. */
406 if (TREE_CODE (op0) == SSA_NAME)
407 {
408 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
409 if (def_stmt && can_propagate_from (def_stmt))
410 {
411 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
c73fee76 412 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 413 rhs0, op1, !single_use0_p);
414 if (tmp)
415 return tmp;
416 }
417 }
418
419 /* If that wasn't successful, try the second operand. */
420 if (TREE_CODE (op1) == SSA_NAME)
421 {
422 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
423 if (def_stmt && can_propagate_from (def_stmt))
424 {
425 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
c73fee76 426 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 427 op0, rhs1, !single_use1_p);
428 if (tmp)
429 return tmp;
430 }
431 }
432
433 /* If that wasn't successful either, try both operands. */
434 if (rhs0 != NULL_TREE
435 && rhs1 != NULL_TREE)
c73fee76 436 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 437 rhs0, rhs1,
438 !(single_use0_p && single_use1_p));
439
440 return tmp;
441}
442
678b2f5b 443/* Propagate from the ssa name definition statements of the operands of
 444 the comparison assignment at *GSI into that comparison if this simplifies it.
6f9714b3 445 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
446 otherwise returns 0. */
c8126d25 447
6f9714b3 448static int
678b2f5b 449forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
c8126d25 450{
678b2f5b 451 gimple stmt = gsi_stmt (*gsi);
452 tree tmp;
6f9714b3 453 bool cfg_changed = false;
56632de0 454 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
6f9714b3 455 tree rhs1 = gimple_assign_rhs1 (stmt);
456 tree rhs2 = gimple_assign_rhs2 (stmt);
c8126d25 457
458 /* Combine the comparison with defining statements. */
c73fee76 459 tmp = forward_propagate_into_comparison_1 (stmt,
678b2f5b 460 gimple_assign_rhs_code (stmt),
56632de0 461 type, rhs1, rhs2);
462 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
c8126d25 463 {
678b2f5b 464 gimple_assign_set_rhs_from_tree (gsi, tmp);
50aacf4c 465 fold_stmt (gsi);
466 update_stmt (gsi_stmt (*gsi));
75200312 467
6f9714b3 468 if (TREE_CODE (rhs1) == SSA_NAME)
469 cfg_changed |= remove_prop_source_from_use (rhs1);
470 if (TREE_CODE (rhs2) == SSA_NAME)
471 cfg_changed |= remove_prop_source_from_use (rhs2);
472 return cfg_changed ? 2 : 1;
c8126d25 473 }
474
6f9714b3 475 return 0;
c8126d25 476}
477
5adc1066 478/* Propagate from the ssa name definition statements of the operands of
75a70cf9 479 the GIMPLE_COND statement STMT into its condition if that simplifies it.
480 Returns zero if no statement was changed, one if there were
481 changes and two if cfg_cleanup needs to run.
48e1416a 482
75a70cf9 483 This must be kept in sync with forward_propagate_into_cond. */
484
485static int
486forward_propagate_into_gimple_cond (gimple stmt)
487{
678b2f5b 488 tree tmp;
489 enum tree_code code = gimple_cond_code (stmt);
6f9714b3 490 bool cfg_changed = false;
491 tree rhs1 = gimple_cond_lhs (stmt);
492 tree rhs2 = gimple_cond_rhs (stmt);
678b2f5b 493
494 /* We can do tree combining on SSA_NAME and comparison expressions. */
495 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
496 return 0;
497
c73fee76 498 tmp = forward_propagate_into_comparison_1 (stmt, code,
678b2f5b 499 boolean_type_node,
6f9714b3 500 rhs1, rhs2);
678b2f5b 501 if (tmp)
502 {
503 if (dump_file && tmp)
504 {
678b2f5b 505 fprintf (dump_file, " Replaced '");
6f9714b3 506 print_gimple_expr (dump_file, stmt, 0, 0);
678b2f5b 507 fprintf (dump_file, "' with '");
508 print_generic_expr (dump_file, tmp, 0);
509 fprintf (dump_file, "'\n");
510 }
75a70cf9 511
678b2f5b 512 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
513 update_stmt (stmt);
75a70cf9 514
6f9714b3 515 if (TREE_CODE (rhs1) == SSA_NAME)
516 cfg_changed |= remove_prop_source_from_use (rhs1);
517 if (TREE_CODE (rhs2) == SSA_NAME)
518 cfg_changed |= remove_prop_source_from_use (rhs2);
519 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
678b2f5b 520 }
75a70cf9 521
10a6edd6 522 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
523 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
524 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
525 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
526 && ((code == EQ_EXPR
527 && integer_zerop (rhs2))
528 || (code == NE_EXPR
529 && integer_onep (rhs2))))
530 {
531 basic_block bb = gimple_bb (stmt);
532 gimple_cond_set_code (stmt, NE_EXPR);
533 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
534 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
535 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
536 return 1;
537 }
538
6f9714b3 539 return 0;
75a70cf9 540}
541
542
 543/* Propagate from the ssa name definition statements of the condition
 544 of the COND_EXPR in the rhs of statement STMT into that condition
8a2caf10 545 if this simplifies it. Returns true if the stmt was changed. */
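
/* For example (an illustrative sketch; the SSA names are made up):
     c_1 = a_2 < b_3;
     x_4 = c_1 ? d_5 : e_6;
   becomes
     x_4 = a_2 < b_3 ? d_5 : e_6;
   and when the condition is defined by a one-bit BIT_NOT_EXPR (or a
   BIT_XOR_EXPR with 1) the two arms are swapped instead.  */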
4ee9c684 546
8a2caf10 547static bool
75a70cf9 548forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
e6dfde59 549{
75a70cf9 550 gimple stmt = gsi_stmt (*gsi_p);
678b2f5b 551 tree tmp = NULL_TREE;
552 tree cond = gimple_assign_rhs1 (stmt);
def3cb70 553 enum tree_code code = gimple_assign_rhs_code (stmt);
10a6edd6 554 bool swap = false;
d080be9e 555
678b2f5b 556 /* We can do tree combining on SSA_NAME and comparison expressions. */
557 if (COMPARISON_CLASS_P (cond))
c73fee76 558 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
f2c1848b 559 TREE_TYPE (cond),
c8126d25 560 TREE_OPERAND (cond, 0),
561 TREE_OPERAND (cond, 1));
678b2f5b 562 else if (TREE_CODE (cond) == SSA_NAME)
563 {
def3cb70 564 enum tree_code def_code;
8a2caf10 565 tree name = cond;
678b2f5b 566 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
567 if (!def_stmt || !can_propagate_from (def_stmt))
6f9714b3 568 return 0;
5adc1066 569
def3cb70 570 def_code = gimple_assign_rhs_code (def_stmt);
571 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
8a2caf10 572 tmp = fold_build2_loc (gimple_location (def_stmt),
def3cb70 573 def_code,
bc112f18 574 TREE_TYPE (cond),
8a2caf10 575 gimple_assign_rhs1 (def_stmt),
576 gimple_assign_rhs2 (def_stmt));
def3cb70 577 else if (code == COND_EXPR
578 && ((def_code == BIT_NOT_EXPR
579 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
580 || (def_code == BIT_XOR_EXPR
581 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
10a6edd6 582 {
583 tmp = gimple_assign_rhs1 (def_stmt);
584 swap = true;
585 }
678b2f5b 586 }
5adc1066 587
25f48be0 588 if (tmp
589 && is_gimple_condexpr (tmp))
678b2f5b 590 {
591 if (dump_file && tmp)
592 {
593 fprintf (dump_file, " Replaced '");
594 print_generic_expr (dump_file, cond, 0);
595 fprintf (dump_file, "' with '");
596 print_generic_expr (dump_file, tmp, 0);
597 fprintf (dump_file, "'\n");
598 }
d080be9e 599
def3cb70 600 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
601 : integer_onep (tmp))
8a2caf10 602 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
603 else if (integer_zerop (tmp))
604 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
605 else
10a6edd6 606 {
607 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
608 if (swap)
609 {
610 tree t = gimple_assign_rhs2 (stmt);
611 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
612 gimple_assign_set_rhs3 (stmt, t);
613 }
614 }
678b2f5b 615 stmt = gsi_stmt (*gsi_p);
616 update_stmt (stmt);
5adc1066 617
8a2caf10 618 return true;
678b2f5b 619 }
d080be9e 620
6f9714b3 621 return 0;
4ee9c684 622}
623
360b78f3 624/* Propagate from the ssa name definition statements of COND_EXPR
625 values in the rhs of statement STMT into the conditional arms
626 if that simplifies it.
627 Returns true if the stmt was changed. */
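
/* For instance (illustrative), in
     y_1 = c_2 ? a_3 : b_4;
     x_5 = c_2 ? y_1 : b_4;
   the second statement is simplified to x_5 = c_2 ? a_3 : b_4, and if
   both arms end up equal the COND_EXPR collapses to a plain copy.  */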
628
629static bool
630combine_cond_exprs (gimple_stmt_iterator *gsi_p)
631{
632 gimple stmt = gsi_stmt (*gsi_p);
633 tree cond, val1, val2;
634 bool changed = false;
635
636 cond = gimple_assign_rhs1 (stmt);
637 val1 = gimple_assign_rhs2 (stmt);
638 if (TREE_CODE (val1) == SSA_NAME)
639 {
640 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
641 if (is_gimple_assign (def_stmt)
642 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
643 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
644 {
645 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
646 gimple_assign_set_rhs2 (stmt, val1);
647 changed = true;
648 }
649 }
650 val2 = gimple_assign_rhs3 (stmt);
651 if (TREE_CODE (val2) == SSA_NAME)
652 {
653 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
654 if (is_gimple_assign (def_stmt)
655 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
656 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
657 {
658 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
659 gimple_assign_set_rhs3 (stmt, val2);
660 changed = true;
661 }
662 }
663 if (operand_equal_p (val1, val2, 0))
664 {
665 gimple_assign_set_rhs_from_tree (gsi_p, val1);
666 stmt = gsi_stmt (*gsi_p);
667 changed = true;
668 }
669
670 if (changed)
671 update_stmt (stmt);
672
673 return changed;
674}
675
48e1416a 676/* We've just substituted an ADDR_EXPR into stmt. Update all the
148aa112 677 relevant data structures to match. */
678
679static void
75a70cf9 680tidy_after_forward_propagate_addr (gimple stmt)
148aa112 681{
148aa112 682 /* We may have turned a trapping insn into a non-trapping insn. */
683 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
75a70cf9 684 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
148aa112 685 cfg_changed = true;
f2fae51f 686
75a70cf9 687 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
688 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
148aa112 689}
690
15ec875c 691/* NAME is a SSA_NAME representing DEF_RHS which is of the form
692 ADDR_EXPR <whatever>.
291d763b 693
3d5cfe81 694 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
291d763b 695 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
3d5cfe81 696 node or for recovery of array indexing from pointer arithmetic.
75a70cf9 697
6b5a5c42 698 Return true if the propagation was successful (the propagation need
 699 not be totally successful; things may have been changed anyway). */
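
/* A typical case (an illustrative sketch; the exact MEM_REF spelling in
   the dumps may differ):
     ptr_1 = &a[0];
     x_2 = MEM[(int *)ptr_1 + 4B];
   is rewritten so that the load no longer goes through ptr_1, e.g.
     x_2 = MEM[(int *)&a + 4B];  */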
291d763b 700
701static bool
75a70cf9 702forward_propagate_addr_expr_1 (tree name, tree def_rhs,
703 gimple_stmt_iterator *use_stmt_gsi,
6776dec8 704 bool single_use_p)
291d763b 705{
75a70cf9 706 tree lhs, rhs, rhs2, array_ref;
75a70cf9 707 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
708 enum tree_code rhs_code;
9e019299 709 bool res = true;
291d763b 710
971c637a 711 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
291d763b 712
75a70cf9 713 lhs = gimple_assign_lhs (use_stmt);
714 rhs_code = gimple_assign_rhs_code (use_stmt);
715 rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 716
6776dec8 717 /* Trivial cases. The use statement could be a trivial copy or a
15ec875c 718 useless conversion. Recurse to the uses of the lhs as copyprop does
971c637a 719 not copy through different variant pointers and FRE does not catch
6776dec8 720 all useless conversions. Treat the case of a single-use name and
 721 a conversion to def_rhs type separately, though. */
971c637a 722 if (TREE_CODE (lhs) == SSA_NAME
75a70cf9 723 && ((rhs_code == SSA_NAME && rhs == name)
316616c9 724 || CONVERT_EXPR_CODE_P (rhs_code)))
6776dec8 725 {
316616c9 726 /* Only recurse if we don't deal with a single use or we cannot
727 do the propagation to the current statement. In particular
728 we can end up with a conversion needed for a non-invariant
729 address which we cannot do in a single statement. */
730 if (!single_use_p
731 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
bd8d8d81 732 && (!is_gimple_min_invariant (def_rhs)
733 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
734 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
735 && (TYPE_PRECISION (TREE_TYPE (lhs))
736 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
971c637a 737 return forward_propagate_addr_expr (lhs, def_rhs);
738
75a70cf9 739 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
316616c9 740 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
741 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
742 else
743 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
6776dec8 744 return true;
745 }
971c637a 746
182cf5a9 747 /* Propagate through constant pointer adjustments. */
748 if (TREE_CODE (lhs) == SSA_NAME
749 && rhs_code == POINTER_PLUS_EXPR
750 && rhs == name
751 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
752 {
753 tree new_def_rhs;
754 /* As we come here with non-invariant addresses in def_rhs we need
755 to make sure we can build a valid constant offsetted address
756 for further propagation. Simply rely on fold building that
757 and check after the fact. */
758 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
759 def_rhs,
760 fold_convert (ptr_type_node,
761 gimple_assign_rhs2 (use_stmt)));
762 if (TREE_CODE (new_def_rhs) == MEM_REF
f5d03f27 763 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
182cf5a9 764 return false;
765 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
766 TREE_TYPE (rhs));
767
768 /* Recurse. If we could propagate into all uses of lhs do not
769 bother to replace into the current use but just pretend we did. */
770 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
771 && forward_propagate_addr_expr (lhs, new_def_rhs))
772 return true;
773
774 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
775 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
776 new_def_rhs, NULL_TREE);
777 else if (is_gimple_min_invariant (new_def_rhs))
778 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
779 new_def_rhs, NULL_TREE);
780 else
781 return false;
782 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
783 update_stmt (use_stmt);
784 return true;
785 }
786
48e1416a 787 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
971c637a 788 ADDR_EXPR will not appear on the LHS. */
d0d1ecb8 789 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
790 while (handled_component_p (*lhsp))
791 lhsp = &TREE_OPERAND (*lhsp, 0);
792 lhs = *lhsp;
971c637a 793
182cf5a9 794 /* Now see if the LHS node is a MEM_REF using NAME. If so,
971c637a 795 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 796 if (TREE_CODE (lhs) == MEM_REF
9e019299 797 && TREE_OPERAND (lhs, 0) == name)
971c637a 798 {
182cf5a9 799 tree def_rhs_base;
800 HOST_WIDE_INT def_rhs_offset;
801 /* If the address is invariant we can always fold it. */
802 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
803 &def_rhs_offset)))
9e019299 804 {
182cf5a9 805 double_int off = mem_ref_offset (lhs);
806 tree new_ptr;
cf8f0e63 807 off += double_int::from_shwi (def_rhs_offset);
182cf5a9 808 if (TREE_CODE (def_rhs_base) == MEM_REF)
809 {
cf8f0e63 810 off += mem_ref_offset (def_rhs_base);
182cf5a9 811 new_ptr = TREE_OPERAND (def_rhs_base, 0);
812 }
813 else
814 new_ptr = build_fold_addr_expr (def_rhs_base);
815 TREE_OPERAND (lhs, 0) = new_ptr;
816 TREE_OPERAND (lhs, 1)
817 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
9e019299 818 tidy_after_forward_propagate_addr (use_stmt);
9e019299 819 /* Continue propagating into the RHS if this was not the only use. */
820 if (single_use_p)
821 return true;
822 }
182cf5a9 823 /* If the LHS is a plain dereference and the value type is the same as
824 that of the pointed-to type of the address we can put the
825 dereferenced address on the LHS preserving the original alias-type. */
d0d1ecb8 826 else if (integer_zerop (TREE_OPERAND (lhs, 1))
827 && ((gimple_assign_lhs (use_stmt) == lhs
828 && useless_type_conversion_p
829 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
830 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
831 || types_compatible_p (TREE_TYPE (lhs),
832 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
f6e2e4ff 833 /* Don't forward anything into clobber stmts if it would result
834 in the lhs no longer being a MEM_REF. */
835 && (!gimple_clobber_p (use_stmt)
836 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
182cf5a9 837 {
838 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 839 tree new_offset, new_base, saved, new_lhs;
182cf5a9 840 while (handled_component_p (*def_rhs_basep))
841 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
842 saved = *def_rhs_basep;
843 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
844 {
845 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 846 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
847 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 848 }
849 else
850 {
851 new_base = build_fold_addr_expr (*def_rhs_basep);
852 new_offset = TREE_OPERAND (lhs, 1);
853 }
854 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
855 new_base, new_offset);
2e5dc41c 856 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 857 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
2e5dc41c 858 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
98d96c6f 859 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 860 *lhsp = new_lhs;
98d96c6f 861 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 862 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
182cf5a9 863 *def_rhs_basep = saved;
864 tidy_after_forward_propagate_addr (use_stmt);
865 /* Continue propagating into the RHS if this was not the
866 only use. */
867 if (single_use_p)
868 return true;
869 }
9e019299 870 else
871 /* We can have a struct assignment dereferencing our name twice.
872 Note that we didn't propagate into the lhs to not falsely
873 claim we did when propagating into the rhs. */
874 res = false;
971c637a 875 }
15ec875c 876
631d5db6 877 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
878 nodes from the RHS. */
d0d1ecb8 879 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
880 if (TREE_CODE (*rhsp) == ADDR_EXPR)
881 rhsp = &TREE_OPERAND (*rhsp, 0);
882 while (handled_component_p (*rhsp))
883 rhsp = &TREE_OPERAND (*rhsp, 0);
884 rhs = *rhsp;
291d763b 885
182cf5a9 886 /* Now see if the RHS node is a MEM_REF using NAME. If so,
291d763b 887 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 888 if (TREE_CODE (rhs) == MEM_REF
889 && TREE_OPERAND (rhs, 0) == name)
291d763b 890 {
182cf5a9 891 tree def_rhs_base;
892 HOST_WIDE_INT def_rhs_offset;
893 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
894 &def_rhs_offset)))
895 {
896 double_int off = mem_ref_offset (rhs);
897 tree new_ptr;
cf8f0e63 898 off += double_int::from_shwi (def_rhs_offset);
182cf5a9 899 if (TREE_CODE (def_rhs_base) == MEM_REF)
900 {
cf8f0e63 901 off += mem_ref_offset (def_rhs_base);
182cf5a9 902 new_ptr = TREE_OPERAND (def_rhs_base, 0);
903 }
904 else
905 new_ptr = build_fold_addr_expr (def_rhs_base);
906 TREE_OPERAND (rhs, 0) = new_ptr;
907 TREE_OPERAND (rhs, 1)
908 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
50aacf4c 909 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 910 tidy_after_forward_propagate_addr (use_stmt);
911 return res;
912 }
2e5dc41c 913 /* If the RHS is a plain dereference and the value type is the same as
182cf5a9 914 that of the pointed-to type of the address we can put the
2e5dc41c 915 dereferenced address on the RHS preserving the original alias-type. */
d0d1ecb8 916 else if (integer_zerop (TREE_OPERAND (rhs, 1))
917 && ((gimple_assign_rhs1 (use_stmt) == rhs
918 && useless_type_conversion_p
919 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
920 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
921 || types_compatible_p (TREE_TYPE (rhs),
922 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
182cf5a9 923 {
924 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 925 tree new_offset, new_base, saved, new_rhs;
182cf5a9 926 while (handled_component_p (*def_rhs_basep))
927 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
928 saved = *def_rhs_basep;
929 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
930 {
931 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 932 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
933 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 934 }
935 else
936 {
937 new_base = build_fold_addr_expr (*def_rhs_basep);
938 new_offset = TREE_OPERAND (rhs, 1);
939 }
940 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
941 new_base, new_offset);
2e5dc41c 942 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 943 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
2e5dc41c 944 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
98d96c6f 945 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 946 *rhsp = new_rhs;
98d96c6f 947 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 948 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
182cf5a9 949 *def_rhs_basep = saved;
50aacf4c 950 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 951 tidy_after_forward_propagate_addr (use_stmt);
952 return res;
953 }
291d763b 954 }
955
971c637a 956 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
957 is nothing to do. */
75a70cf9 958 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
959 || gimple_assign_rhs1 (use_stmt) != name)
971c637a 960 return false;
961
291d763b 962 /* The remaining cases are all for turning pointer arithmetic into
963 array indexing. They only apply when we have the address of
964 element zero in an array. If that is not the case then there
965 is nothing to do. */
15ec875c 966 array_ref = TREE_OPERAND (def_rhs, 0);
182cf5a9 967 if ((TREE_CODE (array_ref) != ARRAY_REF
968 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
969 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
970 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
291d763b 971 return false;
972
75a70cf9 973 rhs2 = gimple_assign_rhs2 (use_stmt);
704d7315 974 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
75a70cf9 975 if (TREE_CODE (rhs2) == INTEGER_CST)
291d763b 976 {
704d7315 977 tree new_rhs = build1_loc (gimple_location (use_stmt),
978 ADDR_EXPR, TREE_TYPE (def_rhs),
979 fold_build2 (MEM_REF,
980 TREE_TYPE (TREE_TYPE (def_rhs)),
981 unshare_expr (def_rhs),
982 fold_convert (ptr_type_node,
983 rhs2)));
984 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
985 use_stmt = gsi_stmt (*use_stmt_gsi);
986 update_stmt (use_stmt);
987 tidy_after_forward_propagate_addr (use_stmt);
988 return true;
291d763b 989 }
990
291d763b 991 return false;
992}
993
3d5cfe81 994/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
995
996 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
997 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
998 node or for recovery of array indexing from pointer arithmetic.
999 Returns true, if all uses have been propagated into. */
1000
1001static bool
15ec875c 1002forward_propagate_addr_expr (tree name, tree rhs)
3d5cfe81 1003{
3d5cfe81 1004 imm_use_iterator iter;
75a70cf9 1005 gimple use_stmt;
3d5cfe81 1006 bool all = true;
6776dec8 1007 bool single_use_p = has_single_use (name);
3d5cfe81 1008
09aca5bc 1009 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
3d5cfe81 1010 {
c96420f8 1011 bool result;
9481f629 1012 tree use_rhs;
3d5cfe81 1013
1014 /* If the use is not in a simple assignment statement, then
1015 there is nothing we can do. */
162efce1 1016 if (!is_gimple_assign (use_stmt))
3d5cfe81 1017 {
688ff29b 1018 if (!is_gimple_debug (use_stmt))
9845d120 1019 all = false;
3d5cfe81 1020 continue;
1021 }
1022
162efce1 1023 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1024 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1025 single_use_p);
1026 /* If the use has moved to a different statement adjust
1027 the update machinery for the old statement too. */
1028 if (use_stmt != gsi_stmt (gsi))
3d5cfe81 1029 {
162efce1 1030 update_stmt (use_stmt);
1031 use_stmt = gsi_stmt (gsi);
3d5cfe81 1032 }
162efce1 1033 update_stmt (use_stmt);
c96420f8 1034 all &= result;
de6ed584 1035
15ec875c 1036 /* Remove intermediate now unused copy and conversion chains. */
75a70cf9 1037 use_rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 1038 if (result
75a70cf9 1039 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
7b705d94 1040 && TREE_CODE (use_rhs) == SSA_NAME
1041 && has_zero_uses (gimple_assign_lhs (use_stmt)))
15ec875c 1042 {
75a70cf9 1043 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
15ec875c 1044 release_defs (use_stmt);
75a70cf9 1045 gsi_remove (&gsi, true);
15ec875c 1046 }
3d5cfe81 1047 }
1048
628ce22b 1049 return all && has_zero_uses (name);
3d5cfe81 1050}
1051
678b2f5b 1052
e3a19533 1053/* Forward propagate the comparison defined in *DEFGSI like
678b2f5b 1054 cond_1 = x CMP y to uses of the form
1055 a_1 = (T')cond_1
1056 a_1 = !cond_1
1057 a_1 = cond_1 != 0
e3a19533 1058 Returns true if stmt is now unused. Advance DEFGSI to the next
1059 statement. */
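
/* For example (illustrative; assumes the negation result has a one-bit
   type and, for floats, that NaNs do not prevent inverting the code):
     cond_1 = x_2 < y_3;
     a_4 = ~cond_1;
   is rewritten into
     a_4 = x_2 >= y_3;
   and the comparison defining cond_1 is removed once it has no other
   uses.  */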
678b2f5b 1060
1061static bool
e3a19533 1062forward_propagate_comparison (gimple_stmt_iterator *defgsi)
678b2f5b 1063{
e3a19533 1064 gimple stmt = gsi_stmt (*defgsi);
678b2f5b 1065 tree name = gimple_assign_lhs (stmt);
1066 gimple use_stmt;
1067 tree tmp = NULL_TREE;
e5b1e080 1068 gimple_stmt_iterator gsi;
1069 enum tree_code code;
1070 tree lhs;
678b2f5b 1071
1072 /* Don't propagate ssa names that occur in abnormal phis. */
1073 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1074 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1075 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1076 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
e3a19533 1077 goto bailout;
678b2f5b 1078
1079 /* Do not un-cse comparisons. But propagate through copies. */
1080 use_stmt = get_prop_dest_stmt (name, &name);
e5b1e080 1081 if (!use_stmt
1082 || !is_gimple_assign (use_stmt))
e3a19533 1083 goto bailout;
678b2f5b 1084
e5b1e080 1085 code = gimple_assign_rhs_code (use_stmt);
1086 lhs = gimple_assign_lhs (use_stmt);
1087 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
e3a19533 1088 goto bailout;
678b2f5b 1089
e5b1e080 1090 /* We can propagate the condition into a statement that
1091 computes the logical negation of the comparison result. */
4b5f1658 1092 if ((code == BIT_NOT_EXPR
1093 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1094 || (code == BIT_XOR_EXPR
1095 && integer_onep (gimple_assign_rhs2 (use_stmt))))
e5b1e080 1096 {
1097 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1098 bool nans = HONOR_NANS (TYPE_MODE (type));
1099 enum tree_code inv_code;
1100 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1101 if (inv_code == ERROR_MARK)
e3a19533 1102 goto bailout;
678b2f5b 1103
e5b1e080 1104 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1105 gimple_assign_rhs2 (stmt));
1106 }
1107 else
e3a19533 1108 goto bailout;
678b2f5b 1109
e5b1e080 1110 gsi = gsi_for_stmt (use_stmt);
1111 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1112 use_stmt = gsi_stmt (gsi);
1113 update_stmt (use_stmt);
678b2f5b 1114
e5b1e080 1115 if (dump_file && (dump_flags & TDF_DETAILS))
1116 {
1117 fprintf (dump_file, " Replaced '");
1118 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1119 fprintf (dump_file, "' with '");
1120 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1121 fprintf (dump_file, "'\n");
678b2f5b 1122 }
1123
e3a19533 1124 /* When we remove stmt now, the iterator defgsi goes off its current
1125 sequence, hence advance it now. */
1126 gsi_next (defgsi);
1127
e5b1e080 1128 /* Remove defining statements. */
1129 return remove_prop_source_from_use (name);
e3a19533 1130
1131bailout:
1132 gsi_next (defgsi);
1133 return false;
678b2f5b 1134}
1135
1136
d23e1965 1137/* GSI_P points to a statement which performs a narrowing integral
1138 conversion.
1139
1140 Look for cases like:
1141
1142 t = x & c;
1143 y = (T) t;
1144
1145 Turn them into:
1146
1147 t = x & c;
1148 y = (T) x;
1149
1150 If T is narrower than X's type and C merely masks off bits outside
1151 of (T) and nothing else.
1152
1153 Normally we'd let DCE remove the dead statement. But no DCE runs
1154 after the last forwprop/combine pass, so we remove the obviously
1155 dead code ourselves.
1156
1157 Return TRUE if a change was made, FALSE otherwise. */
1158
1159static bool
1160simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
1161{
1162 gimple stmt = gsi_stmt (*gsi_p);
1163 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
1164
1165 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1166 the only use of the BIT_AND_EXPR result is the conversion. */
1167 if (is_gimple_assign (rhs_def_stmt)
1168 && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
1169 && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
1170 {
1171 tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
1172 tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
1173 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1174
1175 /* Now verify suitability of the BIT_AND_EXPR's operands.
1176 The first must be an SSA_NAME that we can propagate and the
1177 second must be an integer constant that masks out all the
1178 bits outside the final result's type, but nothing else. */
1179 if (TREE_CODE (rhs_def_operand1) == SSA_NAME
1180 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
1181 && TREE_CODE (rhs_def_operand2) == INTEGER_CST
1182 && operand_equal_p (rhs_def_operand2,
1183 build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
1184 TYPE_PRECISION (lhs_type)),
1185 0))
1186 {
1187 /* This is an optimizable case. Replace the source operand
1188 in the conversion with the first source operand of the
1189 BIT_AND_EXPR. */
1190 gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
1191 stmt = gsi_stmt (*gsi_p);
1192 update_stmt (stmt);
1193
1194 /* There is no DCE after the last forwprop pass. It's
1195 easy to clean up the first order effects here. */
1196 gimple_stmt_iterator si;
1197 si = gsi_for_stmt (rhs_def_stmt);
1198 gsi_remove (&si, true);
1199 release_defs (rhs_def_stmt);
1200 return true;
1201 }
1202 }
1203
1204 return false;
1205}
1206
1207
3a938499 1208/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1209 If so, we can change STMT into lhs = y which can later be copy
48e1416a 1210 propagated. Similarly for negation.
3a938499 1211
48e1416a 1212 This could trivially be formulated as a forward propagation
3a938499 1213 to immediate uses. However, we already had an implementation
1214 from DOM which used backward propagation via the use-def links.
1215
1216 It turns out that backward propagation is actually faster as
1217 there's less work to do for each NOT/NEG expression we find.
1218 Backwards propagation needs to look at the statement in a single
1219 backlink. Forward propagation needs to look at potentially more
678b2f5b 1220 than one forward link.
3a938499 1221
678b2f5b 1222 Returns true when the statement was changed. */
1223
1224static bool
75a70cf9 1225simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
3a938499 1226{
75a70cf9 1227 gimple stmt = gsi_stmt (*gsi_p);
1228 tree rhs = gimple_assign_rhs1 (stmt);
1229 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
3a938499 1230
1231 /* See if the RHS_DEF_STMT has the same form as our statement. */
75a70cf9 1232 if (is_gimple_assign (rhs_def_stmt)
1233 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
3a938499 1234 {
75a70cf9 1235 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
3a938499 1236
1237 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1238 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1239 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1240 {
75a70cf9 1241 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1242 stmt = gsi_stmt (*gsi_p);
3a938499 1243 update_stmt (stmt);
678b2f5b 1244 return true;
3a938499 1245 }
1246 }
678b2f5b 1247
1248 return false;
3a938499 1249}
3d5cfe81 1250
b59e1c90 1251/* Helper function for simplify_gimple_switch. Remove case labels that
1252 have values outside the range of the new type. */
1253
1254static void
1255simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1256{
1257 unsigned int branch_num = gimple_switch_num_labels (stmt);
f1f41a6c 1258 vec<tree> labels;
1259 labels.create (branch_num);
b59e1c90 1260 unsigned int i, len;
1261
1262 /* Collect the existing case labels in a VEC, and preprocess it as if
1263 we are gimplifying a GENERIC SWITCH_EXPR. */
1264 for (i = 1; i < branch_num; i++)
f1f41a6c 1265 labels.quick_push (gimple_switch_label (stmt, i));
b59e1c90 1266 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1267
1268 /* If any labels were removed, replace the existing case labels
1269 in the GIMPLE_SWITCH statement with the correct ones.
1270 Note that the type updates were done in-place on the case labels,
1271 so we only have to replace the case labels in the GIMPLE_SWITCH
1272 if the number of labels changed. */
f1f41a6c 1273 len = labels.length ();
b59e1c90 1274 if (len < branch_num - 1)
1275 {
1276 bitmap target_blocks;
1277 edge_iterator ei;
1278 edge e;
1279
1280 /* Corner case: *all* case labels have been removed as being
1281 out-of-range for INDEX_TYPE. Push one label and let the
1282 CFG cleanups deal with this further. */
1283 if (len == 0)
1284 {
1285 tree label, elt;
1286
1287 label = CASE_LABEL (gimple_switch_default_label (stmt));
1288 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
f1f41a6c 1289 labels.quick_push (elt);
b59e1c90 1290 len = 1;
1291 }
1292
f1f41a6c 1293 for (i = 0; i < labels.length (); i++)
1294 gimple_switch_set_label (stmt, i + 1, labels[i]);
b59e1c90 1295 for (i++ ; i < branch_num; i++)
1296 gimple_switch_set_label (stmt, i, NULL_TREE);
1297 gimple_switch_set_num_labels (stmt, len + 1);
1298
1299 /* Cleanup any edges that are now dead. */
1300 target_blocks = BITMAP_ALLOC (NULL);
1301 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1302 {
1303 tree elt = gimple_switch_label (stmt, i);
1304 basic_block target = label_to_block (CASE_LABEL (elt));
1305 bitmap_set_bit (target_blocks, target->index);
1306 }
1307 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1308 {
1309 if (! bitmap_bit_p (target_blocks, e->dest->index))
1310 {
1311 remove_edge (e);
1312 cfg_changed = true;
1313 free_dominance_info (CDI_DOMINATORS);
1314 }
1315 else
1316 ei_next (&ei);
1317 }
1318 BITMAP_FREE (target_blocks);
1319 }
1320
f1f41a6c 1321 labels.release ();
b59e1c90 1322}
1323
b5860aba 1324/* STMT is a GIMPLE_SWITCH for which we attempt to find equivalent forms of
 1325 the condition which we may be able to optimize better. */
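
/* For instance (illustrative; c_2 is assumed to have type unsigned char),
   given
     _1 = (int) c_2;
     switch (_1) <case 1: ..., case 300: ...>
   the widening cast preserves every value of c_2, so the switch index is
   replaced by c_2 and simplify_gimple_switch_label_vec then drops the
   out-of-range label 300.  */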
1326
678b2f5b 1327static bool
75a70cf9 1328simplify_gimple_switch (gimple stmt)
b5860aba 1329{
75a70cf9 1330 tree cond = gimple_switch_index (stmt);
b5860aba 1331 tree def, to, ti;
75a70cf9 1332 gimple def_stmt;
b5860aba 1333
1334 /* The optimization that we really care about is removing unnecessary
1335 casts. That will let us do much better in propagating the inferred
1336 constant at the switch target. */
1337 if (TREE_CODE (cond) == SSA_NAME)
1338 {
75a70cf9 1339 def_stmt = SSA_NAME_DEF_STMT (cond);
1340 if (is_gimple_assign (def_stmt))
b5860aba 1341 {
75a70cf9 1342 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
b5860aba 1343 {
1344 int need_precision;
1345 bool fail;
1346
75a70cf9 1347 def = gimple_assign_rhs1 (def_stmt);
b5860aba 1348
b5860aba 1349 to = TREE_TYPE (cond);
1350 ti = TREE_TYPE (def);
1351
1352 /* If we have an extension that preserves value, then we
1353 can copy the source value into the switch. */
1354
1355 need_precision = TYPE_PRECISION (ti);
1356 fail = false;
c5237b8b 1357 if (! INTEGRAL_TYPE_P (ti))
1358 fail = true;
1359 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
b5860aba 1360 fail = true;
1361 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1362 need_precision += 1;
1363 if (TYPE_PRECISION (to) < need_precision)
1364 fail = true;
1365
1366 if (!fail)
1367 {
75a70cf9 1368 gimple_switch_set_index (stmt, def);
b59e1c90 1369 simplify_gimple_switch_label_vec (stmt, ti);
b5860aba 1370 update_stmt (stmt);
678b2f5b 1371 return true;
b5860aba 1372 }
1373 }
1374 }
1375 }
678b2f5b 1376
1377 return false;
b5860aba 1378}
1379
27f931ff 1380/* For pointers p2 and p1 return p2 - p1 if the
1381 difference is known and constant, otherwise return NULL. */
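
/* For example (illustrative), for
     p1_1 = &buf[0];
     p2_2 = p1_1 + 4;
   both definition chains are walked, the common base p1_1 is found and
   the constant difference 4 is returned.  */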
1382
1383static tree
1384constant_pointer_difference (tree p1, tree p2)
1385{
1386 int i, j;
1387#define CPD_ITERATIONS 5
1388 tree exps[2][CPD_ITERATIONS];
1389 tree offs[2][CPD_ITERATIONS];
1390 int cnt[2];
1391
1392 for (i = 0; i < 2; i++)
1393 {
1394 tree p = i ? p1 : p2;
1395 tree off = size_zero_node;
1396 gimple stmt;
1397 enum tree_code code;
1398
1399 /* For each of p1 and p2 we need to iterate at least
1400 twice, to handle ADDR_EXPR directly in p1/p2,
1401 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1402 on definition's stmt RHS. Iterate a few extra times. */
1403 j = 0;
1404 do
1405 {
1406 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1407 break;
1408 if (TREE_CODE (p) == ADDR_EXPR)
1409 {
1410 tree q = TREE_OPERAND (p, 0);
1411 HOST_WIDE_INT offset;
1412 tree base = get_addr_base_and_unit_offset (q, &offset);
1413 if (base)
1414 {
1415 q = base;
1416 if (offset)
1417 off = size_binop (PLUS_EXPR, off, size_int (offset));
1418 }
1419 if (TREE_CODE (q) == MEM_REF
1420 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1421 {
1422 p = TREE_OPERAND (q, 0);
1423 off = size_binop (PLUS_EXPR, off,
1424 double_int_to_tree (sizetype,
1425 mem_ref_offset (q)));
1426 }
1427 else
1428 {
1429 exps[i][j] = q;
1430 offs[i][j++] = off;
1431 break;
1432 }
1433 }
1434 if (TREE_CODE (p) != SSA_NAME)
1435 break;
1436 exps[i][j] = p;
1437 offs[i][j++] = off;
1438 if (j == CPD_ITERATIONS)
1439 break;
1440 stmt = SSA_NAME_DEF_STMT (p);
1441 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1442 break;
1443 code = gimple_assign_rhs_code (stmt);
1444 if (code == POINTER_PLUS_EXPR)
1445 {
1446 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1447 break;
1448 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1449 p = gimple_assign_rhs1 (stmt);
1450 }
1451 else if (code == ADDR_EXPR || code == NOP_EXPR)
1452 p = gimple_assign_rhs1 (stmt);
1453 else
1454 break;
1455 }
1456 while (1);
1457 cnt[i] = j;
1458 }
1459
1460 for (i = 0; i < cnt[0]; i++)
1461 for (j = 0; j < cnt[1]; j++)
1462 if (exps[0][i] == exps[1][j])
1463 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1464
1465 return NULL_TREE;
1466}
1467
1468/* *GSI_P is a GIMPLE_CALL to a builtin function.
1469 Optimize
1470 memcpy (p, "abcd", 4);
1471 memset (p + 4, ' ', 3);
1472 into
 1473 memcpy (p, "abcd   ", 7);
1474 call if the latter can be stored by pieces during expansion. */
1475
1476static bool
1477simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1478{
1479 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1480 tree vuse = gimple_vuse (stmt2);
1481 if (vuse == NULL)
1482 return false;
1483 stmt1 = SSA_NAME_DEF_STMT (vuse);
1484
1485 switch (DECL_FUNCTION_CODE (callee2))
1486 {
1487 case BUILT_IN_MEMSET:
1488 if (gimple_call_num_args (stmt2) != 3
1489 || gimple_call_lhs (stmt2)
1490 || CHAR_BIT != 8
1491 || BITS_PER_UNIT != 8)
1492 break;
1493 else
1494 {
1495 tree callee1;
1496 tree ptr1, src1, str1, off1, len1, lhs1;
1497 tree ptr2 = gimple_call_arg (stmt2, 0);
1498 tree val2 = gimple_call_arg (stmt2, 1);
1499 tree len2 = gimple_call_arg (stmt2, 2);
1500 tree diff, vdef, new_str_cst;
1501 gimple use_stmt;
1502 unsigned int ptr1_align;
1503 unsigned HOST_WIDE_INT src_len;
1504 char *src_buf;
1505 use_operand_p use_p;
1506
1507 if (!host_integerp (val2, 0)
1508 || !host_integerp (len2, 1))
1509 break;
1510 if (is_gimple_call (stmt1))
1511 {
1512 /* If first stmt is a call, it needs to be memcpy
1513 or mempcpy, with string literal as second argument and
1514 constant length. */
1515 callee1 = gimple_call_fndecl (stmt1);
1516 if (callee1 == NULL_TREE
1517 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1518 || gimple_call_num_args (stmt1) != 3)
1519 break;
1520 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1521 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1522 break;
1523 ptr1 = gimple_call_arg (stmt1, 0);
1524 src1 = gimple_call_arg (stmt1, 1);
1525 len1 = gimple_call_arg (stmt1, 2);
1526 lhs1 = gimple_call_lhs (stmt1);
1527 if (!host_integerp (len1, 1))
1528 break;
1529 str1 = string_constant (src1, &off1);
1530 if (str1 == NULL_TREE)
1531 break;
1532 if (!host_integerp (off1, 1)
1533 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1534 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1535 - tree_low_cst (off1, 1)) > 0
1536 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1537 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1538 != TYPE_MODE (char_type_node))
1539 break;
1540 }
1541 else if (gimple_assign_single_p (stmt1))
1542 {
1543 /* Otherwise look for length 1 memcpy optimized into
1544 assignment. */
1545 ptr1 = gimple_assign_lhs (stmt1);
1546 src1 = gimple_assign_rhs1 (stmt1);
1547 if (TREE_CODE (ptr1) != MEM_REF
1548 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1549 || !host_integerp (src1, 0))
1550 break;
1551 ptr1 = build_fold_addr_expr (ptr1);
1552 callee1 = NULL_TREE;
1553 len1 = size_one_node;
1554 lhs1 = NULL_TREE;
1555 off1 = size_zero_node;
1556 str1 = NULL_TREE;
1557 }
1558 else
1559 break;
1560
1561 diff = constant_pointer_difference (ptr1, ptr2);
1562 if (diff == NULL && lhs1 != NULL)
1563 {
1564 diff = constant_pointer_difference (lhs1, ptr2);
1565 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1566 && diff != NULL)
1567 diff = size_binop (PLUS_EXPR, diff,
1568 fold_convert (sizetype, len1));
1569 }
1570 /* If the difference between the second and first destination pointer
1571 is not constant, or is bigger than memcpy length, bail out. */
1572 if (diff == NULL
1573 || !host_integerp (diff, 1)
1574 || tree_int_cst_lt (len1, diff))
1575 break;
1576
1577 /* Use maximum of difference plus memset length and memcpy length
1578 as the new memcpy length, if it is too big, bail out. */
1579 src_len = tree_low_cst (diff, 1);
1580 src_len += tree_low_cst (len2, 1);
1581 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1582 src_len = tree_low_cst (len1, 1);
1583 if (src_len > 1024)
1584 break;
1585
1586 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1587 with bigger length will return different result. */
1588 if (lhs1 != NULL_TREE
1589 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1590 && (TREE_CODE (lhs1) != SSA_NAME
1591 || !single_imm_use (lhs1, &use_p, &use_stmt)
1592 || use_stmt != stmt2))
1593 break;
1594
1595	  /* If anything reads memory in between the memcpy and memset
1596	     calls, the modified memcpy call might change it.  */
1597 vdef = gimple_vdef (stmt1);
1598 if (vdef != NULL
1599 && (!single_imm_use (vdef, &use_p, &use_stmt)
1600 || use_stmt != stmt2))
1601 break;
1602
957d0361 1603 ptr1_align = get_pointer_alignment (ptr1);
27f931ff 1604 /* Construct the new source string literal. */
1605 src_buf = XALLOCAVEC (char, src_len + 1);
1606 if (callee1)
1607 memcpy (src_buf,
1608 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1609 tree_low_cst (len1, 1));
1610 else
1611 src_buf[0] = tree_low_cst (src1, 0);
1612 memset (src_buf + tree_low_cst (diff, 1),
0adffe78 1613 tree_low_cst (val2, 0), tree_low_cst (len2, 1));
27f931ff 1614 src_buf[src_len] = '\0';
1615 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1616	     handles embedded '\0's.  */
1617 if (strlen (src_buf) != src_len)
1618 break;
1619 rtl_profile_for_bb (gimple_bb (stmt2));
1620 /* If the new memcpy wouldn't be emitted by storing the literal
1621 by pieces, this optimization might enlarge .rodata too much,
1622 as commonly used string literals couldn't be shared any
1623 longer. */
1624 if (!can_store_by_pieces (src_len,
1625 builtin_strncpy_read_str,
1626 src_buf, ptr1_align, false))
1627 break;
1628
1629 new_str_cst = build_string_literal (src_len, src_buf);
1630 if (callee1)
1631 {
1632 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1633 memset call. */
1634 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1635 gimple_call_set_lhs (stmt1, NULL_TREE);
1636 gimple_call_set_arg (stmt1, 1, new_str_cst);
1637 gimple_call_set_arg (stmt1, 2,
1638 build_int_cst (TREE_TYPE (len1), src_len));
1639 update_stmt (stmt1);
1640 unlink_stmt_vdef (stmt2);
1641 gsi_remove (gsi_p, true);
1642 release_defs (stmt2);
1643 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1644 release_ssa_name (lhs1);
1645 return true;
1646 }
1647 else
1648 {
1649 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1650 assignment, remove STMT1 and change memset call into
1651 memcpy call. */
1652 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1653
7ecb2e7c 1654 if (!is_gimple_val (ptr1))
1655 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1656 true, GSI_SAME_STMT);
b9a16870 1657 gimple_call_set_fndecl (stmt2,
1658 builtin_decl_explicit (BUILT_IN_MEMCPY));
27f931ff 1659 gimple_call_set_arg (stmt2, 0, ptr1);
1660 gimple_call_set_arg (stmt2, 1, new_str_cst);
1661 gimple_call_set_arg (stmt2, 2,
1662 build_int_cst (TREE_TYPE (len2), src_len));
1663 unlink_stmt_vdef (stmt1);
1664 gsi_remove (&gsi, true);
1665 release_defs (stmt1);
1666 update_stmt (stmt2);
1667 return false;
1668 }
1669 }
1670 break;
1671 default:
1672 break;
1673 }
1674 return false;
1675}
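/* An illustrative example of the memcpy/memset merging above (assuming
   plain 8-bit chars):

     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);

   becomes the single call

     memcpy (p, "abcd   ", 7);

   provided nothing reads the destination in between and the combined
   literal can still be stored by pieces.  */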
1676
41913fa9 1677/* Check whether NAME has a type of one-bit precision, or is a known
1678 truth-valued expression. */
1679static bool
1680truth_valued_ssa_name (tree name)
1681{
1682 gimple def;
1683 tree type = TREE_TYPE (name);
1684
1685 if (!INTEGRAL_TYPE_P (type))
1686 return false;
1687 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1688 necessarily one and so ~X is not equal to !X. */
1689 if (TYPE_PRECISION (type) == 1)
1690 return true;
1691 def = SSA_NAME_DEF_STMT (name);
1692 if (is_gimple_assign (def))
1693 return truth_value_p (gimple_assign_rhs_code (def));
1694 return false;
1695}
1696
1697/* Helper routine for the simplify_bitwise_binary_1 function.
1698 For the SSA name NAME, return the expression X if NAME satisfies
1699 NAME = !X. Otherwise return NULL_TREE.
1700 Detected patterns for NAME = !X are:
1701 !X and X == 0 for X with integral type.
1702 X ^ 1, X != 1, or ~X for X with integral type of precision one. */
1703static tree
1704lookup_logical_inverted_value (tree name)
1705{
1706 tree op1, op2;
1707 enum tree_code code;
1708 gimple def;
1709
1710	  /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1711 return. */
1712 if (TREE_CODE (name) != SSA_NAME
1713 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1714 return NULL_TREE;
1715 def = SSA_NAME_DEF_STMT (name);
1716 if (!is_gimple_assign (def))
1717 return NULL_TREE;
1718
1719 code = gimple_assign_rhs_code (def);
1720 op1 = gimple_assign_rhs1 (def);
1721 op2 = NULL_TREE;
1722
1723	  /* For EQ_EXPR, NE_EXPR, or BIT_XOR_EXPR fetch the second operand.
8f4a7578 1724	     Any CODE other than these and BIT_NOT_EXPR is rejected by the switch below.  */
41913fa9 1725 if (code == EQ_EXPR || code == NE_EXPR
1726 || code == BIT_XOR_EXPR)
1727 op2 = gimple_assign_rhs2 (def);
1728
1729 switch (code)
1730 {
41913fa9 1731 case BIT_NOT_EXPR:
1732 if (truth_valued_ssa_name (name))
1733 return op1;
1734 break;
1735 case EQ_EXPR:
1736 /* Check if we have X == 0 and X has an integral type. */
1737 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1738 break;
1739 if (integer_zerop (op2))
1740 return op1;
1741 break;
1742 case NE_EXPR:
1743	      /* Check if we have X != 1 and X is truth-valued.  */
1744 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1745 break;
1746 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1747 return op1;
1748 break;
1749 case BIT_XOR_EXPR:
1750 /* Check if we have X ^ 1 and X is truth valued. */
1751 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1752 return op1;
1753 break;
1754 default:
1755 break;
1756 }
1757
1758 return NULL_TREE;
1759}
1760
1761/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1762 operations CODE, if one operand has the logically inverted
1763 value of the other. */
1764static tree
1765simplify_bitwise_binary_1 (enum tree_code code, tree type,
1766 tree arg1, tree arg2)
1767{
1768 tree anot;
1769
1770 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1771 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1772 && code != BIT_XOR_EXPR)
1773 return NULL_TREE;
1774
1775	  /* First check if operands ARG1 and ARG2 are equal.  If so, return
1776	     NULL_TREE as this optimization is handled by fold_stmt.  */
1777 if (arg1 == arg2)
1778 return NULL_TREE;
1779 /* See if we have in arguments logical-not patterns. */
1780 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1781 || anot != arg2)
1782 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1783 || anot != arg1))
1784 return NULL_TREE;
1785
1786 /* X & !X -> 0. */
1787 if (code == BIT_AND_EXPR)
1788 return fold_convert (type, integer_zero_node);
1789 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1790 if (truth_valued_ssa_name (anot))
1791 return fold_convert (type, integer_one_node);
1792
1793	  /* ??? Otherwise the result is (X != 0 ? X : 1); not handled.  */
1794 return NULL_TREE;
1795}
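/* For example, if tmp_2 is defined as tmp_2 = (x_1 == 0), i.e. tmp_2 = !x_1,
   then x_1 & tmp_2 simplifies to 0, and x_1 | tmp_2 and x_1 ^ tmp_2
   simplify to 1 provided x_1 is truth-valued.  */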
1796
10fbe63d 1797/* Given an SSA_NAME in NAME, see if it was defined by an assignment and,
1798 if so, set CODE to the rhs code, ARG1 to the first operand on the rhs
1799 and ARG2 to the second operand on the rhs. */
1800
1801static inline void
1802defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1803{
1804 gimple def;
1805 enum tree_code code1;
1806 tree arg11;
1807 tree arg21;
1808 tree arg31;
1809 enum gimple_rhs_class grhs_class;
1810
1811 code1 = TREE_CODE (name);
1812 arg11 = name;
1813 arg21 = NULL_TREE;
1814 grhs_class = get_gimple_rhs_class (code1);
1815
1816 if (code1 == SSA_NAME)
1817 {
1818 def = SSA_NAME_DEF_STMT (name);
1819
1820 if (def && is_gimple_assign (def)
1821 && can_propagate_from (def))
1822 {
1823 code1 = gimple_assign_rhs_code (def);
1824 arg11 = gimple_assign_rhs1 (def);
1825 arg21 = gimple_assign_rhs2 (def);
1826	  arg31 = gimple_assign_rhs3 (def);
1827 }
1828 }
1829 else if (grhs_class == GIMPLE_TERNARY_RHS
1830	   || grhs_class == GIMPLE_BINARY_RHS
1831	   || grhs_class == GIMPLE_UNARY_RHS
1832	   || grhs_class == GIMPLE_SINGLE_RHS)
1833 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1834
1835 *code = code1;
1836 *arg1 = arg11;
1837 if (arg2)
1838 *arg2 = arg21;
1839 /* Ignore arg3 currently. */
1840}
1841
750e47f5 1842/* Return true if a conversion of an operand from type FROM to type TO
1843 should be applied after performing the operation instead. */
1844
1845static bool
1846hoist_conversion_for_bitop_p (tree to, tree from)
1847{
1848  /* That's a good idea if the conversion widens the operand: after
1849     hoisting the conversion the operation will be narrower.  */
1850 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1851 return true;
1852
1853 /* It's also a good idea if the conversion is to a non-integer mode. */
1854 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1855 return true;
1856
1857 /* Or if the precision of TO is not the same as the precision
1858 of its mode. */
1859 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1860 return true;
1861
1862 return false;
1863}
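/* As an illustration, for an unsigned char operand c_1 the sequence

     tmp_2 = (int) c_1;
     res_3 = tmp_2 & 15;

   is better performed in the narrower type as

     tmp_2 = c_1 & 15;
     res_3 = (int) tmp_2;

   because the conversion widens the operand.  */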
1864
16bc66ec 1865/* GSI points to a statement of the form
1866
1867 result = OP0 CODE OP1
1868
1869   where OP0 and OP1 are single-bit SSA_NAMEs and CODE is either
1870   BIT_AND_EXPR or BIT_IOR_EXPR.
1871
1872   If OP0 is fed by a bitwise negation of another single-bit SSA_NAME,
1873   then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1874   when CODE is BIT_AND_EXPR or BIT_IOR_EXPR respectively.
1875
040f64e5 1876 If a simplification is made, return TRUE, else return FALSE. */
16bc66ec 1877static bool
1878simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1879 enum tree_code code,
1880 tree op0, tree op1)
1881{
1882 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1883
1884 if (!is_gimple_assign (op0_def_stmt)
1885 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1886 return false;
1887
1888 tree x = gimple_assign_rhs1 (op0_def_stmt);
1889 if (TREE_CODE (x) == SSA_NAME
1890 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1891 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1892 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1893 {
1894 enum tree_code newcode;
1895
1896 gimple stmt = gsi_stmt (*gsi);
1897 gimple_assign_set_rhs1 (stmt, x);
1898 gimple_assign_set_rhs2 (stmt, op1);
1899 if (code == BIT_AND_EXPR)
1900 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1901 else
1902 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1903 gimple_assign_set_rhs_code (stmt, newcode);
1904 update_stmt (stmt);
1905 return true;
1906 }
1907 return false;
1908
1909}
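/* For example, with single-bit unsigned SSA_NAMEs x_1 and y_2 in

     tmp_3 = ~x_1;
     res_4 = tmp_3 & y_2;
     if (res_4 != 0) ...

   the statement defining res_4 is rewritten as res_4 = x_1 < y_2 (the
   BIT_IOR_EXPR form becomes x_1 <= y_2), leaving the negation to be
   removed as dead code if it has no other uses.  */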
1910
300da094 1911/* Simplify bitwise binary operations.
1912 Return true if a transformation applied, otherwise return false. */
1c4607fd 1913
300da094 1914static bool
1915simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1c4607fd 1916{
300da094 1917 gimple stmt = gsi_stmt (*gsi);
1c4607fd 1918 tree arg1 = gimple_assign_rhs1 (stmt);
1919 tree arg2 = gimple_assign_rhs2 (stmt);
300da094 1920 enum tree_code code = gimple_assign_rhs_code (stmt);
1921 tree res;
10fbe63d 1922 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
26f54bd0 1923 enum tree_code def1_code, def2_code;
1c4607fd 1924
10fbe63d 1925 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1926 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
26f54bd0 1927
750e47f5 1928   /* Try to fold (type) X op CST -> (type) (X op ((type of X) CST))
1929 when profitable. */
25ce0d90 1930 if (TREE_CODE (arg2) == INTEGER_CST
1931 && CONVERT_EXPR_CODE_P (def1_code)
750e47f5 1932 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
105fc895 1933 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
25ce0d90 1934 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1935 {
1936 gimple newop;
03d37e4e 1937 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
25ce0d90 1938 newop =
1939 gimple_build_assign_with_ops (code, tem, def1_arg1,
1940 fold_convert_loc (gimple_location (stmt),
1941 TREE_TYPE (def1_arg1),
1942 arg2));
4b5f1658 1943 gimple_set_location (newop, gimple_location (stmt));
25ce0d90 1944 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1945 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1946 tem, NULL_TREE, NULL_TREE);
1947 update_stmt (gsi_stmt (*gsi));
1948 return true;
1949 }
1950
300da094 1951 /* For bitwise binary operations apply operand conversions to the
1952 binary operation result instead of to the operands. This allows
1953     combining successive conversions and bitwise binary operations.  */
26f54bd0 1954 if (CONVERT_EXPR_CODE_P (def1_code)
1955 && CONVERT_EXPR_CODE_P (def2_code)
1956 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
750e47f5 1957 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1c4607fd 1958 {
26f54bd0 1959 gimple newop;
03d37e4e 1960 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
26f54bd0 1961 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
4b5f1658 1962 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 1963 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1964 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1965 tem, NULL_TREE, NULL_TREE);
1966 update_stmt (gsi_stmt (*gsi));
1967 return true;
1968 }
1969
35967c0f 1970
1971 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1972 if (def1_code == def2_code
1973 && def1_code == BIT_AND_EXPR
0a3f7203 1974 && operand_equal_for_phi_arg_p (def1_arg2,
1975 def2_arg2))
35967c0f 1976 {
0a3f7203 1977 tree b = def1_arg2;
35967c0f 1978 tree a = def1_arg1;
1979 tree c = def2_arg1;
1980 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
1981      /* If A OP0 C folds to 0 (this usually means C is the same as A),
1982	 the whole expression folds to 0 as well.  */
1983 if (integer_zerop (inner))
1984 {
1985 gimple_assign_set_rhs_from_tree (gsi, inner);
1986 update_stmt (stmt);
1987 return true;
1988 }
1989      /* If A OP0 C folds to an existing SSA name (this usually means C is
1990	 the same as A), rewrite the whole statement as (A OP0 C) & B.  */
1991 else if (TREE_CODE (inner) == SSA_NAME)
1992 {
1993 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
1994 inner, b);
1995 gimple_assign_set_rhs_from_tree (gsi, outer);
1996 update_stmt (stmt);
1997 return true;
1998 }
1999 else
2000 {
2001 gimple newop;
2002 tree tem;
03d37e4e 2003 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
35967c0f 2004 newop = gimple_build_assign_with_ops (code, tem, a, c);
35967c0f 2005 gimple_set_location (newop, gimple_location (stmt));
2006 /* Make sure to re-process the new stmt as it's walking upwards. */
2007 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2008 gimple_assign_set_rhs1 (stmt, tem);
2009 gimple_assign_set_rhs2 (stmt, b);
2010 gimple_assign_set_rhs_code (stmt, def1_code);
2011 update_stmt (stmt);
2012 return true;
2013 }
2014 }
2015
26f54bd0 2016 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2017 if (code == BIT_AND_EXPR
2018 && def1_code == BIT_IOR_EXPR
2c83a45e 2019 && CONSTANT_CLASS_P (arg2)
2020 && CONSTANT_CLASS_P (def1_arg2))
26f54bd0 2021 {
2022 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
10fbe63d 2023 arg2, def1_arg2);
26f54bd0 2024 tree tem;
2025 gimple newop;
2026 if (integer_zerop (cst))
300da094 2027 {
26f54bd0 2028 gimple_assign_set_rhs1 (stmt, def1_arg1);
2029 update_stmt (stmt);
2030 return true;
300da094 2031 }
03d37e4e 2032 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
26f54bd0 2033 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2034 tem, def1_arg1, arg2);
4b5f1658 2035 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 2036 /* Make sure to re-process the new stmt as it's walking upwards. */
2037 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2038 gimple_assign_set_rhs1 (stmt, tem);
2039 gimple_assign_set_rhs2 (stmt, cst);
2040 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2041 update_stmt (stmt);
2042 return true;
2043 }
2044
2045 /* Combine successive equal operations with constants. */
2046 if ((code == BIT_AND_EXPR
2047 || code == BIT_IOR_EXPR
2048 || code == BIT_XOR_EXPR)
2049 && def1_code == code
2c83a45e 2050 && CONSTANT_CLASS_P (arg2)
2051 && CONSTANT_CLASS_P (def1_arg2))
26f54bd0 2052 {
2053 tree cst = fold_build2 (code, TREE_TYPE (arg2),
10fbe63d 2054 arg2, def1_arg2);
26f54bd0 2055 gimple_assign_set_rhs1 (stmt, def1_arg1);
2056 gimple_assign_set_rhs2 (stmt, cst);
2057 update_stmt (stmt);
2058 return true;
1c4607fd 2059 }
300da094 2060
8a5f403f 2061 /* Canonicalize X ^ ~0 to ~X. */
2062 if (code == BIT_XOR_EXPR
8a5f403f 2063 && integer_all_onesp (arg2))
2064 {
2065 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2066 gcc_assert (gsi_stmt (*gsi) == stmt);
2067 update_stmt (stmt);
2068 return true;
2069 }
2070
41913fa9 2071 /* Try simple folding for X op !X, and X op X. */
2072 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2073 if (res != NULL_TREE)
2074 {
2075 gimple_assign_set_rhs_from_tree (gsi, res);
2076 update_stmt (gsi_stmt (*gsi));
2077 return true;
2078 }
2079
10fbe63d 2080 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2081 {
2082 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2083 if (def1_code == ocode)
2084 {
2085 tree x = arg2;
2086 enum tree_code coden;
2087 tree a1, a2;
2088 /* ( X | Y) & X -> X */
2089 /* ( X & Y) | X -> X */
2090 if (x == def1_arg1
2091 || x == def1_arg2)
2092 {
2093 gimple_assign_set_rhs_from_tree (gsi, x);
2094 update_stmt (gsi_stmt (*gsi));
2095 return true;
2096 }
2097
2098 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2099 /* (~X | Y) & X -> X & Y */
2100 /* (~X & Y) | X -> X | Y */
2101 if (coden == BIT_NOT_EXPR && a1 == x)
2102 {
2103 gimple_assign_set_rhs_with_ops (gsi, code,
2104 x, def1_arg2);
2105 gcc_assert (gsi_stmt (*gsi) == stmt);
2106 update_stmt (stmt);
2107 return true;
2108 }
2109 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2110 /* (Y | ~X) & X -> X & Y */
2111 /* (Y & ~X) | X -> X | Y */
2112 if (coden == BIT_NOT_EXPR && a1 == x)
2113 {
2114 gimple_assign_set_rhs_with_ops (gsi, code,
2115 x, def1_arg1);
2116 gcc_assert (gsi_stmt (*gsi) == stmt);
2117 update_stmt (stmt);
2118 return true;
2119 }
2120 }
2121 if (def2_code == ocode)
2122 {
2123 enum tree_code coden;
2124 tree a1;
2125 tree x = arg1;
2126 /* X & ( X | Y) -> X */
2127 /* X | ( X & Y) -> X */
2128 if (x == def2_arg1
2129 || x == def2_arg2)
2130 {
2131 gimple_assign_set_rhs_from_tree (gsi, x);
2132 update_stmt (gsi_stmt (*gsi));
2133 return true;
2134 }
2135 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2136 /* (~X | Y) & X -> X & Y */
2137 /* (~X & Y) | X -> X | Y */
2138 if (coden == BIT_NOT_EXPR && a1 == x)
2139 {
2140 gimple_assign_set_rhs_with_ops (gsi, code,
2141 x, def2_arg2);
2142 gcc_assert (gsi_stmt (*gsi) == stmt);
2143 update_stmt (stmt);
2144 return true;
2145 }
2146 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2147 /* (Y | ~X) & X -> X & Y */
2148 /* (Y & ~X) | X -> X | Y */
2149 if (coden == BIT_NOT_EXPR && a1 == x)
2150 {
2151 gimple_assign_set_rhs_with_ops (gsi, code,
2152 x, def2_arg1);
2153 gcc_assert (gsi_stmt (*gsi) == stmt);
2154 update_stmt (stmt);
2155 return true;
2156 }
2157 }
10fbe63d 2158
16bc66ec 2159 /* If arg1 and arg2 are booleans (or any single bit type)
2160 then try to simplify:
2161
2162 (~X & Y) -> X < Y
2163 (X & ~Y) -> Y < X
2164 (~X | Y) -> X <= Y
2165 (X | ~Y) -> Y <= X
2166
2167 But only do this if our result feeds into a comparison as
2168 this transformation is not always a win, particularly on
2169 targets with and-not instructions. */
2170 if (TREE_CODE (arg1) == SSA_NAME
2171 && TREE_CODE (arg2) == SSA_NAME
2172 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2173 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2174 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2175 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2176 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2177 {
2178 use_operand_p use_p;
2179 gimple use_stmt;
2180
2181 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2182 {
2183 if (gimple_code (use_stmt) == GIMPLE_COND
2184 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2185 && integer_zerop (gimple_cond_rhs (use_stmt))
2186 && gimple_cond_code (use_stmt) == NE_EXPR)
2187 {
2188 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2189 return true;
2190 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2191 return true;
2192 }
2193 }
2194 }
2195 }
300da094 2196 return false;
1c4607fd 2197}
2198
ca3c9092 2199
3b8827a2 2200/* Recognize rotation patterns. Return true if a transformation
2201 applied, otherwise return false.
2202
2203 We are looking for X with unsigned type T with bitsize B, OP being
2204 +, | or ^, some type T2 wider than T and
2205 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2206 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2207 (X << Y) OP (X >> (B - Y))
2208 (X << (int) Y) OP (X >> (int) (B - Y))
2209 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2210 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
043ce677 2211 (X << Y) | (X >> ((-Y) & (B - 1)))
2212 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2213 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2214 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
3b8827a2 2215
2216 and transform these into:
2217 X r<< CNT1
2218 X r<< Y
2219
2220 Note, in the patterns with T2 type, the type of OP operands
2221   might even be a signed type, but should have precision B.  */
2222
2223static bool
2224simplify_rotate (gimple_stmt_iterator *gsi)
2225{
2226 gimple stmt = gsi_stmt (*gsi);
2227 tree arg[2], rtype, rotcnt = NULL_TREE;
2228 tree def_arg1[2], def_arg2[2];
2229 enum tree_code def_code[2];
2230 tree lhs;
2231 int i;
2232 bool swapped_p = false;
2233 gimple g;
2234
2235 arg[0] = gimple_assign_rhs1 (stmt);
2236 arg[1] = gimple_assign_rhs2 (stmt);
2237 rtype = TREE_TYPE (arg[0]);
2238
2239 /* Only create rotates in complete modes. Other cases are not
2240 expanded properly. */
2241 if (!INTEGRAL_TYPE_P (rtype)
2242 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2243 return false;
2244
2245 for (i = 0; i < 2; i++)
2246 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2247
2248 /* Look through narrowing conversions. */
2249 if (CONVERT_EXPR_CODE_P (def_code[0])
2250 && CONVERT_EXPR_CODE_P (def_code[1])
2251 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2252 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2253 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2254 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2255 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2256 && has_single_use (arg[0])
2257 && has_single_use (arg[1]))
2258 {
2259 for (i = 0; i < 2; i++)
2260 {
2261 arg[i] = def_arg1[i];
2262 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2263 }
2264 }
2265
2266 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2267 for (i = 0; i < 2; i++)
2268 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2269 return false;
2270 else if (!has_single_use (arg[i]))
2271 return false;
2272 if (def_code[0] == def_code[1])
2273 return false;
2274
2275 /* If we've looked through narrowing conversions before, look through
2276 widening conversions from unsigned type with the same precision
2277 as rtype here. */
2278 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2279 for (i = 0; i < 2; i++)
2280 {
2281 tree tem;
2282 enum tree_code code;
2283 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2284 if (!CONVERT_EXPR_CODE_P (code)
2285 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2286 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2287 return false;
2288 def_arg1[i] = tem;
2289 }
2290 /* Both shifts have to use the same first operand. */
2291 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2292 return false;
2293 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2294 return false;
2295
2296 /* CNT1 + CNT2 == B case above. */
2297 if (host_integerp (def_arg2[0], 1)
2298 && host_integerp (def_arg2[1], 1)
2299 && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
2300 + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
2301 rotcnt = def_arg2[0];
2302 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2303 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2304 return false;
2305 else
2306 {
2307 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2308 enum tree_code cdef_code[2];
2309      /* Look through a conversion of the shift count argument.
2310	 The C/C++ FEs cast any shift count argument to integer_type_node.
2311	 The only problem might be if the shift count type's maximum value
2312	 is equal to or smaller than the number of bits in rtype.  */
2313 for (i = 0; i < 2; i++)
2314 {
2315 def_arg2_alt[i] = def_arg2[i];
2316 defcodefor_name (def_arg2[i], &cdef_code[i],
2317 &cdef_arg1[i], &cdef_arg2[i]);
2318 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2319 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2320 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2321 > floor_log2 (TYPE_PRECISION (rtype))
2322 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2323 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2324 {
2325 def_arg2_alt[i] = cdef_arg1[i];
2326 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2327 &cdef_arg1[i], &cdef_arg2[i]);
2328 }
2329 }
2330 for (i = 0; i < 2; i++)
2331 /* Check for one shift count being Y and the other B - Y,
2332 with optional casts. */
2333 if (cdef_code[i] == MINUS_EXPR
2334 && host_integerp (cdef_arg1[i], 0)
2335 && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
2336 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2337 {
2338 tree tem;
2339 enum tree_code code;
2340
2341 if (cdef_arg2[i] == def_arg2[1 - i]
2342 || cdef_arg2[i] == def_arg2_alt[1 - i])
2343 {
2344 rotcnt = cdef_arg2[i];
2345 break;
2346 }
2347 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2348 if (CONVERT_EXPR_CODE_P (code)
2349 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2350 && TYPE_PRECISION (TREE_TYPE (tem))
2351 > floor_log2 (TYPE_PRECISION (rtype))
2352 && TYPE_PRECISION (TREE_TYPE (tem))
2353 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2354 && (tem == def_arg2[1 - i]
2355 || tem == def_arg2_alt[1 - i]))
2356 {
2357 rotcnt = tem;
2358 break;
2359 }
2360 }
2361 /* The above sequence isn't safe for Y being 0,
2362 because then one of the shifts triggers undefined behavior.
2363	 This alternative is safe even for a rotation count of 0.
2364 One shift count is Y and the other (-Y) & (B - 1). */
2365 else if (cdef_code[i] == BIT_AND_EXPR
2366 && host_integerp (cdef_arg2[i], 0)
2367 && tree_low_cst (cdef_arg2[i], 0)
2368 == TYPE_PRECISION (rtype) - 1
043ce677 2369 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2370 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
3b8827a2 2371 {
2372 tree tem;
2373 enum tree_code code;
2374
2375 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2376 if (CONVERT_EXPR_CODE_P (code)
2377 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2378 && TYPE_PRECISION (TREE_TYPE (tem))
2379 > floor_log2 (TYPE_PRECISION (rtype))
2380 && TYPE_PRECISION (TREE_TYPE (tem))
2381 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2382 defcodefor_name (tem, &code, &tem, NULL);
2383
2384 if (code == NEGATE_EXPR)
2385 {
2386 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2387 {
2388 rotcnt = tem;
2389 break;
2390 }
2391 defcodefor_name (tem, &code, &tem, NULL);
2392 if (CONVERT_EXPR_CODE_P (code)
2393 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2394 && TYPE_PRECISION (TREE_TYPE (tem))
2395 > floor_log2 (TYPE_PRECISION (rtype))
2396 && TYPE_PRECISION (TREE_TYPE (tem))
2397 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2398 && (tem == def_arg2[1 - i]
2399 || tem == def_arg2_alt[1 - i]))
2400 {
2401 rotcnt = tem;
2402 break;
2403 }
2404 }
2405 }
2406 if (rotcnt == NULL_TREE)
2407 return false;
2408 swapped_p = i != 1;
2409 }
2410
2411 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2412 TREE_TYPE (rotcnt)))
2413 {
2414 g = gimple_build_assign_with_ops (NOP_EXPR,
2415 make_ssa_name (TREE_TYPE (def_arg2[0]),
2416 NULL),
2417 rotcnt, NULL_TREE);
2418 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2419 rotcnt = gimple_assign_lhs (g);
2420 }
2421 lhs = gimple_assign_lhs (stmt);
2422 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2423 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2424 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2425 ? LROTATE_EXPR : RROTATE_EXPR,
2426 lhs, def_arg1[0], rotcnt);
2427 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2428 {
2429 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2430 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2431 lhs, NULL_TREE);
2432 }
2433 gsi_replace (gsi, g, false);
2434 return true;
2435}
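/* A typical source-level instance of the patterns above, for a 32-bit
   unsigned int:

     r = (x << n) | (x >> ((-n) & 31));

   which is turned into the single rotate r = x r<< n, valid even for
   n == 0.  */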
2436
ca3c9092 2437/* Perform re-associations of the plus or minus statement STMT that are
b69d1cb6 2438 always permitted. Returns true if the CFG was changed. */
ca3c9092 2439
b69d1cb6 2440static bool
50aacf4c 2441associate_plusminus (gimple_stmt_iterator *gsi)
ca3c9092 2442{
50aacf4c 2443 gimple stmt = gsi_stmt (*gsi);
ca3c9092 2444 tree rhs1 = gimple_assign_rhs1 (stmt);
2445 tree rhs2 = gimple_assign_rhs2 (stmt);
2446 enum tree_code code = gimple_assign_rhs_code (stmt);
ca3c9092 2447 bool changed;
2448
2449 /* We can't reassociate at all for saturating types. */
2450 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
b69d1cb6 2451 return false;
ca3c9092 2452
2453 /* First contract negates. */
2454 do
2455 {
2456 changed = false;
2457
2458 /* A +- (-B) -> A -+ B. */
2459 if (TREE_CODE (rhs2) == SSA_NAME)
2460 {
2461 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2462 if (is_gimple_assign (def_stmt)
32cdcc42 2463 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2464 && can_propagate_from (def_stmt))
ca3c9092 2465 {
2466 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2467 gimple_assign_set_rhs_code (stmt, code);
2468 rhs2 = gimple_assign_rhs1 (def_stmt);
2469 gimple_assign_set_rhs2 (stmt, rhs2);
2470 gimple_set_modified (stmt, true);
2471 changed = true;
2472 }
2473 }
2474
2475 /* (-A) + B -> B - A. */
2476 if (TREE_CODE (rhs1) == SSA_NAME
2477 && code == PLUS_EXPR)
2478 {
2479 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2480 if (is_gimple_assign (def_stmt)
32cdcc42 2481 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2482 && can_propagate_from (def_stmt))
ca3c9092 2483 {
2484 code = MINUS_EXPR;
2485 gimple_assign_set_rhs_code (stmt, code);
2486 rhs1 = rhs2;
2487 gimple_assign_set_rhs1 (stmt, rhs1);
2488 rhs2 = gimple_assign_rhs1 (def_stmt);
2489 gimple_assign_set_rhs2 (stmt, rhs2);
2490 gimple_set_modified (stmt, true);
2491 changed = true;
2492 }
2493 }
2494 }
2495 while (changed);
2496
2497 /* We can't reassociate floating-point or fixed-point plus or minus
2498 because of saturation to +-Inf. */
2499 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2500 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2501 goto out;
2502
2503  /* Second, match patterns that allow contracting a plus-minus pair
2504 irrespective of overflow issues.
2505
2506 (A +- B) - A -> +- B
2507 (A +- B) -+ B -> A
2508 (CST +- A) +- CST -> CST +- A
2c83a45e 2509 (A +- CST) +- CST -> A +- CST
ca3c9092 2510 ~A + A -> -1
2511 ~A + 1 -> -A
2512 A - (A +- B) -> -+ B
2513 A +- (B +- A) -> +- B
2514 CST +- (CST +- A) -> CST +- A
2515 CST +- (A +- CST) -> CST +- A
2516 A + ~A -> -1
2517
2518     via commuting the addition and contracting operations to zero
2519 by reassociation. */
2520
ca3c9092 2521 if (TREE_CODE (rhs1) == SSA_NAME)
2522 {
2523 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
32cdcc42 2524 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2525 {
2526 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2527 if (def_code == PLUS_EXPR
2528 || def_code == MINUS_EXPR)
2529 {
2530 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2531 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2532 if (operand_equal_p (def_rhs1, rhs2, 0)
2533 && code == MINUS_EXPR)
2534 {
2535 /* (A +- B) - A -> +- B. */
2536 code = ((def_code == PLUS_EXPR)
2537 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2538 rhs1 = def_rhs2;
2539 rhs2 = NULL_TREE;
50aacf4c 2540 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2541 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2542 gimple_set_modified (stmt, true);
2543 }
2544 else if (operand_equal_p (def_rhs2, rhs2, 0)
2545 && code != def_code)
2546 {
2547 /* (A +- B) -+ B -> A. */
2548 code = TREE_CODE (def_rhs1);
2549 rhs1 = def_rhs1;
2550 rhs2 = NULL_TREE;
50aacf4c 2551 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2552 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2553 gimple_set_modified (stmt, true);
2554 }
2c83a45e 2555 else if (CONSTANT_CLASS_P (rhs2)
2556 && CONSTANT_CLASS_P (def_rhs1))
ca3c9092 2557 {
2558 /* (CST +- A) +- CST -> CST +- A. */
2559 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2560 def_rhs1, rhs2);
2561 if (cst && !TREE_OVERFLOW (cst))
2562 {
2563 code = def_code;
2564 gimple_assign_set_rhs_code (stmt, code);
2565 rhs1 = cst;
2566 gimple_assign_set_rhs1 (stmt, rhs1);
2567 rhs2 = def_rhs2;
2568 gimple_assign_set_rhs2 (stmt, rhs2);
2569 gimple_set_modified (stmt, true);
2570 }
2571 }
2c83a45e 2572 else if (CONSTANT_CLASS_P (rhs2)
2573 && CONSTANT_CLASS_P (def_rhs2))
ca3c9092 2574 {
2c83a45e 2575 /* (A +- CST) +- CST -> A +- CST. */
2576 enum tree_code mix = (code == def_code)
2577 ? PLUS_EXPR : MINUS_EXPR;
2578 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
ca3c9092 2579 def_rhs2, rhs2);
2580 if (cst && !TREE_OVERFLOW (cst))
2581 {
2c83a45e 2582 code = def_code;
ca3c9092 2583 gimple_assign_set_rhs_code (stmt, code);
2584 rhs1 = def_rhs1;
2585 gimple_assign_set_rhs1 (stmt, rhs1);
2586 rhs2 = cst;
2587 gimple_assign_set_rhs2 (stmt, rhs2);
2588 gimple_set_modified (stmt, true);
2589 }
2590 }
2591 }
2c83a45e 2592 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
ca3c9092 2593 {
2594 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2c83a45e 2595 if (operand_equal_p (def_rhs1, rhs2, 0))
ca3c9092 2596 {
2597 /* ~A + A -> -1. */
2c83a45e 2598 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
ca3c9092 2599 rhs2 = NULL_TREE;
2c83a45e 2600 code = TREE_CODE (rhs1);
50aacf4c 2601 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2602 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2603 gimple_set_modified (stmt, true);
2604 }
2c83a45e 2605 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2606 && integer_onep (rhs2))
2607 || (TREE_CODE (rhs2) == COMPLEX_CST
2608 && integer_onep (TREE_REALPART (rhs2))
2609 && integer_onep (TREE_IMAGPART (rhs2))))
ca3c9092 2610 {
2611 /* ~A + 1 -> -A. */
2612 code = NEGATE_EXPR;
2613 rhs1 = def_rhs1;
2614 rhs2 = NULL_TREE;
50aacf4c 2615 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2616 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2617 gimple_set_modified (stmt, true);
2618 }
2619 }
2620 }
2621 }
2622
2623 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2624 {
2625 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
32cdcc42 2626 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2627 {
2628 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2629 if (def_code == PLUS_EXPR
2630 || def_code == MINUS_EXPR)
2631 {
2632 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2633 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2634 if (operand_equal_p (def_rhs1, rhs1, 0)
2635 && code == MINUS_EXPR)
2636 {
2637 /* A - (A +- B) -> -+ B. */
2638 code = ((def_code == PLUS_EXPR)
2639 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2640 rhs1 = def_rhs2;
2641 rhs2 = NULL_TREE;
50aacf4c 2642 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2643 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2644 gimple_set_modified (stmt, true);
2645 }
2646 else if (operand_equal_p (def_rhs2, rhs1, 0)
2647 && code != def_code)
2648 {
2649 /* A +- (B +- A) -> +- B. */
2650 code = ((code == PLUS_EXPR)
2651 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2652 rhs1 = def_rhs1;
2653 rhs2 = NULL_TREE;
50aacf4c 2654 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2655 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2656 gimple_set_modified (stmt, true);
2657 }
2c83a45e 2658 else if (CONSTANT_CLASS_P (rhs1)
2659 && CONSTANT_CLASS_P (def_rhs1))
ca3c9092 2660 {
2661 /* CST +- (CST +- A) -> CST +- A. */
2662 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2663 rhs1, def_rhs1);
2664 if (cst && !TREE_OVERFLOW (cst))
2665 {
2666 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2667 gimple_assign_set_rhs_code (stmt, code);
2668 rhs1 = cst;
2669 gimple_assign_set_rhs1 (stmt, rhs1);
2670 rhs2 = def_rhs2;
2671 gimple_assign_set_rhs2 (stmt, rhs2);
2672 gimple_set_modified (stmt, true);
2673 }
2674 }
2c83a45e 2675 else if (CONSTANT_CLASS_P (rhs1)
2676 && CONSTANT_CLASS_P (def_rhs2))
ca3c9092 2677 {
2678 /* CST +- (A +- CST) -> CST +- A. */
2679 tree cst = fold_binary (def_code == code
2680 ? PLUS_EXPR : MINUS_EXPR,
2681 TREE_TYPE (rhs2),
2682 rhs1, def_rhs2);
2683 if (cst && !TREE_OVERFLOW (cst))
2684 {
2685 rhs1 = cst;
2686 gimple_assign_set_rhs1 (stmt, rhs1);
2687 rhs2 = def_rhs1;
2688 gimple_assign_set_rhs2 (stmt, rhs2);
2689 gimple_set_modified (stmt, true);
2690 }
2691 }
2692 }
2c83a45e 2693 else if (def_code == BIT_NOT_EXPR)
ca3c9092 2694 {
2695 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2696 if (code == PLUS_EXPR
2697 && operand_equal_p (def_rhs1, rhs1, 0))
2698 {
2699 /* A + ~A -> -1. */
2c83a45e 2700 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
ca3c9092 2701 rhs2 = NULL_TREE;
2c83a45e 2702 code = TREE_CODE (rhs1);
50aacf4c 2703 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2704 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2705 gimple_set_modified (stmt, true);
2706 }
2707 }
2708 }
2709 }
2710
2711out:
2712 if (gimple_modified_p (stmt))
2713 {
50aacf4c 2714 fold_stmt_inplace (gsi);
ca3c9092 2715 update_stmt (stmt);
b69d1cb6 2716 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2717 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2718 return true;
ca3c9092 2719 }
b69d1cb6 2720
2721 return false;
ca3c9092 2722}
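/* For instance, the always-valid contraction (A +- B) - A -> +- B rewrites

     tmp_1 = a_2 + b_3;
     res_4 = tmp_1 - a_2;

   into res_4 = b_3, irrespective of overflow behavior.  */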
2723
c9c17332 2724/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
2725 true if anything changed, false otherwise. */
2726
2727static bool
2728associate_pointerplus (gimple_stmt_iterator *gsi)
2729{
2730 gimple stmt = gsi_stmt (*gsi);
2731 gimple def_stmt;
2732 tree ptr, rhs, algn;
2733
2734 /* Pattern match
2735 tem = (sizetype) ptr;
2736 tem = tem & algn;
2737 tem = -tem;
2738 ... = ptr p+ tem;
2739     and produce the simpler form, which is easier to analyze with respect to alignment:
2740 ... = ptr & ~algn; */
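  /* This typically comes from source that rounds a pointer down to an
     alignment boundary, e.g. (illustrative only)

       q = p - ((uintptr_t) p & (align - 1));

     which is equivalent to masking the low bits off directly.  */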
2741 ptr = gimple_assign_rhs1 (stmt);
2742 rhs = gimple_assign_rhs2 (stmt);
2743 if (TREE_CODE (rhs) != SSA_NAME)
2744 return false;
2745 def_stmt = SSA_NAME_DEF_STMT (rhs);
2746 if (!is_gimple_assign (def_stmt)
2747 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2748 return false;
2749 rhs = gimple_assign_rhs1 (def_stmt);
2750 if (TREE_CODE (rhs) != SSA_NAME)
2751 return false;
2752 def_stmt = SSA_NAME_DEF_STMT (rhs);
2753 if (!is_gimple_assign (def_stmt)
2754 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2755 return false;
2756 rhs = gimple_assign_rhs1 (def_stmt);
2757 algn = gimple_assign_rhs2 (def_stmt);
2758 if (TREE_CODE (rhs) != SSA_NAME
2759 || TREE_CODE (algn) != INTEGER_CST)
2760 return false;
2761 def_stmt = SSA_NAME_DEF_STMT (rhs);
2762 if (!is_gimple_assign (def_stmt)
2763 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2764 return false;
2765 if (gimple_assign_rhs1 (def_stmt) != ptr)
2766 return false;
2767
cf8f0e63 2768 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
c9c17332 2769 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2770 fold_stmt_inplace (gsi);
2771 update_stmt (stmt);
2772
2773 return true;
2774}
2775
6afd0544 2776/* Combine two conversions in a row for the second conversion at *GSI.
89c8f35a 2777 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2778 run. Else it returns 0. */
6afd0544 2779
89c8f35a 2780static int
6afd0544 2781combine_conversions (gimple_stmt_iterator *gsi)
2782{
2783 gimple stmt = gsi_stmt (*gsi);
2784 gimple def_stmt;
2785 tree op0, lhs;
2786 enum tree_code code = gimple_assign_rhs_code (stmt);
487282d5 2787 enum tree_code code2;
6afd0544 2788
2789 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2790 || code == FLOAT_EXPR
2791 || code == FIX_TRUNC_EXPR);
2792
2793 lhs = gimple_assign_lhs (stmt);
2794 op0 = gimple_assign_rhs1 (stmt);
2795 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2796 {
2797 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
89c8f35a 2798 return 1;
6afd0544 2799 }
2800
2801 if (TREE_CODE (op0) != SSA_NAME)
89c8f35a 2802 return 0;
6afd0544 2803
2804 def_stmt = SSA_NAME_DEF_STMT (op0);
2805 if (!is_gimple_assign (def_stmt))
89c8f35a 2806 return 0;
6afd0544 2807
487282d5 2808 code2 = gimple_assign_rhs_code (def_stmt);
2809
2810 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
6afd0544 2811 {
2812 tree defop0 = gimple_assign_rhs1 (def_stmt);
2813 tree type = TREE_TYPE (lhs);
2814 tree inside_type = TREE_TYPE (defop0);
2815 tree inter_type = TREE_TYPE (op0);
2816 int inside_int = INTEGRAL_TYPE_P (inside_type);
2817 int inside_ptr = POINTER_TYPE_P (inside_type);
2818 int inside_float = FLOAT_TYPE_P (inside_type);
2819 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2820 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2821 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2822 int inter_int = INTEGRAL_TYPE_P (inter_type);
2823 int inter_ptr = POINTER_TYPE_P (inter_type);
2824 int inter_float = FLOAT_TYPE_P (inter_type);
2825 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2826 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2827 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2828 int final_int = INTEGRAL_TYPE_P (type);
2829 int final_ptr = POINTER_TYPE_P (type);
2830 int final_float = FLOAT_TYPE_P (type);
2831 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2832 unsigned int final_prec = TYPE_PRECISION (type);
2833 int final_unsignedp = TYPE_UNSIGNED (type);
2834
3aeff048 2835 /* Don't propagate ssa names that occur in abnormal phis. */
2836 if (TREE_CODE (defop0) == SSA_NAME
2837 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2838 return 0;
2839
6afd0544 2840 /* In addition to the cases of two conversions in a row
2841 handled below, if we are converting something to its own
2842 type via an object of identical or wider precision, neither
2843 conversion is needed. */
2844 if (useless_type_conversion_p (type, inside_type)
2845 && (((inter_int || inter_ptr) && final_int)
2846 || (inter_float && final_float))
2847 && inter_prec >= final_prec)
2848 {
2849 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2850 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2851 update_stmt (stmt);
89c8f35a 2852 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2853 }
2854
2855 /* Likewise, if the intermediate and initial types are either both
2856 float or both integer, we don't need the middle conversion if the
2857 former is wider than the latter and doesn't change the signedness
2858 (for integers). Avoid this if the final type is a pointer since
2859 then we sometimes need the middle conversion. Likewise if the
2860 final type has a precision not equal to the size of its mode. */
2861 if (((inter_int && inside_int)
2862 || (inter_float && inside_float)
2863 || (inter_vec && inside_vec))
2864 && inter_prec >= inside_prec
2865 && (inter_float || inter_vec
2866 || inter_unsignedp == inside_unsignedp)
51dbf409 2867 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
6afd0544 2868 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2869 && ! final_ptr
2870 && (! final_vec || inter_prec == inside_prec))
2871 {
2872 gimple_assign_set_rhs1 (stmt, defop0);
2873 update_stmt (stmt);
89c8f35a 2874 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2875 }
2876
2877 /* If we have a sign-extension of a zero-extended value, we can
a6476f88 2878 replace that by a single zero-extension. Likewise if the
2879 final conversion does not change precision we can drop the
2880 intermediate conversion. */
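      /* For example (illustrative), for unsigned char c the two conversions
	 in (int) (short) c can be done as the single zero-extension (int) c,
	 and for short s, (unsigned int) (int) s is simply (unsigned int) s.  */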
6afd0544 2881 if (inside_int && inter_int && final_int
a6476f88 2882 && ((inside_prec < inter_prec && inter_prec < final_prec
2883 && inside_unsignedp && !inter_unsignedp)
2884 || final_prec == inter_prec))
6afd0544 2885 {
2886 gimple_assign_set_rhs1 (stmt, defop0);
2887 update_stmt (stmt);
89c8f35a 2888 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2889 }
2890
2891 /* Two conversions in a row are not needed unless:
2892 - some conversion is floating-point (overstrict for now), or
2893 - some conversion is a vector (overstrict for now), or
2894 - the intermediate type is narrower than both initial and
2895 final, or
2896 - the intermediate type and innermost type differ in signedness,
2897 and the outermost type is wider than the intermediate, or
2898 - the initial type is a pointer type and the precisions of the
2899 intermediate and final types differ, or
2900 - the final type is a pointer type and the precisions of the
2901 initial and intermediate types differ. */
2902 if (! inside_float && ! inter_float && ! final_float
2903 && ! inside_vec && ! inter_vec && ! final_vec
2904 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2905 && ! (inside_int && inter_int
2906 && inter_unsignedp != inside_unsignedp
2907 && inter_prec < final_prec)
2908 && ((inter_unsignedp && inter_prec > inside_prec)
2909 == (final_unsignedp && final_prec > inter_prec))
2910 && ! (inside_ptr && inter_prec != final_prec)
2911 && ! (final_ptr && inside_prec != inter_prec)
51dbf409 2912 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
6afd0544 2913 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2914 {
2915 gimple_assign_set_rhs1 (stmt, defop0);
2916 update_stmt (stmt);
89c8f35a 2917 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2918 }
2919
2920 /* A truncation to an unsigned type should be canonicalized as
2921 bitwise and of a mask. */
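      /* For instance, with unsigned int u, (int) (unsigned short) u is
	 canonicalized to (int) (u & 65535).  */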
2922 if (final_int && inter_int && inside_int
2923 && final_prec == inside_prec
2924 && final_prec > inter_prec
2925 && inter_unsignedp)
2926 {
2927 tree tem;
2928 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2929 defop0,
2930 double_int_to_tree
cf8f0e63 2931 (inside_type, double_int::mask (inter_prec)));
6afd0544 2932 if (!useless_type_conversion_p (type, inside_type))
2933 {
2934 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2935 GSI_SAME_STMT);
2936 gimple_assign_set_rhs1 (stmt, tem);
2937 }
2938 else
2939 gimple_assign_set_rhs_from_tree (gsi, tem);
2940 update_stmt (gsi_stmt (*gsi));
89c8f35a 2941 return 1;
6afd0544 2942 }
487282d5 2943
2944      /* If we are converting an integer to a floating-point type that can
2945 represent it exactly and back to an integer, we can skip the
2946 floating-point conversion. */
2947 if (inside_int && inter_float && final_int &&
2948 (unsigned) significand_size (TYPE_MODE (inter_type))
2949 >= inside_prec - !inside_unsignedp)
2950 {
2951 if (useless_type_conversion_p (type, inside_type))
2952 {
2953 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2954 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2955 update_stmt (stmt);
2956 return remove_prop_source_from_use (op0) ? 2 : 1;
2957 }
2958 else
2959 {
2960 gimple_assign_set_rhs1 (stmt, defop0);
2961 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2962 update_stmt (stmt);
2963 return remove_prop_source_from_use (op0) ? 2 : 1;
2964 }
2965 }
6afd0544 2966 }
2967
89c8f35a 2968 return 0;
6afd0544 2969}
2970
173c91d9 2971/* Combine an element access with a shuffle. Returns true if there were
2972 any changes made, else it returns false. */
2973
2974static bool
2975simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2976{
2977 gimple stmt = gsi_stmt (*gsi);
2978 gimple def_stmt;
2979 tree op, op0, op1, op2;
2980 tree elem_type;
2981 unsigned idx, n, size;
2982 enum tree_code code;
2983
2984 op = gimple_assign_rhs1 (stmt);
2985 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
2986
2987 op0 = TREE_OPERAND (op, 0);
2988 if (TREE_CODE (op0) != SSA_NAME
2989 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
2990 return false;
2991
58bf5219 2992 def_stmt = get_prop_source_stmt (op0, false, NULL);
2993 if (!def_stmt || !can_propagate_from (def_stmt))
2994 return false;
2995
2996 op1 = TREE_OPERAND (op, 1);
2997 op2 = TREE_OPERAND (op, 2);
2998 code = gimple_assign_rhs_code (def_stmt);
2999
3000 if (code == CONSTRUCTOR)
3001 {
3002 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3003 gimple_assign_rhs1 (def_stmt), op1, op2);
3004 if (!tem || !valid_gimple_rhs_p (tem))
3005 return false;
3006 gimple_assign_set_rhs_from_tree (gsi, tem);
3007 update_stmt (gsi_stmt (*gsi));
3008 return true;
3009 }
3010
173c91d9 3011 elem_type = TREE_TYPE (TREE_TYPE (op0));
3012 if (TREE_TYPE (op) != elem_type)
3013 return false;
3014
3015 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
173c91d9 3016 n = TREE_INT_CST_LOW (op1) / size;
3017 if (n != 1)
3018 return false;
173c91d9 3019 idx = TREE_INT_CST_LOW (op2) / size;
3020
173c91d9 3021 if (code == VEC_PERM_EXPR)
3022 {
3023 tree p, m, index, tem;
3024 unsigned nelts;
3025 m = gimple_assign_rhs3 (def_stmt);
3026 if (TREE_CODE (m) != VECTOR_CST)
3027 return false;
3028 nelts = VECTOR_CST_NELTS (m);
3029 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3030 idx %= 2 * nelts;
3031 if (idx < nelts)
3032 {
3033 p = gimple_assign_rhs1 (def_stmt);
3034 }
3035 else
3036 {
3037 p = gimple_assign_rhs2 (def_stmt);
3038 idx -= nelts;
3039 }
3040 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3041 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
ab54bbbd 3042 unshare_expr (p), op1, index);
173c91d9 3043 gimple_assign_set_rhs1 (stmt, tem);
3044 fold_stmt (gsi);
3045 update_stmt (gsi_stmt (*gsi));
3046 return true;
3047 }
3048
3049 return false;
3050}
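/* For example, a BIT_FIELD_REF extracting the first element of
   w_5 = VEC_PERM_EXPR <a_1, b_2, { 5, 1, 2, 3 }> (four-element vectors)
   is rewritten to extract element 1 of b_2 directly, since selector 5
   picks element 5 - 4 = 1 of the second input.  */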
3051
496ec2ad 3052/* Determine whether applying the 2 permutations (mask1 then mask2)
3053   gives back one of the inputs.  */
3054
3055static int
3056is_combined_permutation_identity (tree mask1, tree mask2)
3057{
3058 tree mask;
3059 unsigned int nelts, i, j;
3060 bool maybe_identity1 = true;
3061 bool maybe_identity2 = true;
3062
3063 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3064 && TREE_CODE (mask2) == VECTOR_CST);
3065 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3066 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3067
3068 nelts = VECTOR_CST_NELTS (mask);
3069 for (i = 0; i < nelts; i++)
3070 {
3071 tree val = VECTOR_CST_ELT (mask, i);
3072 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3073 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3074 if (j == i)
3075 maybe_identity2 = false;
3076 else if (j == i + nelts)
3077 maybe_identity1 = false;
3078 else
3079 return 0;
3080 }
3081 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3082}
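/* For two-element vectors, for instance, composing the element swap
   { 1, 0 } with itself selects elements { 0, 1 } of the first input,
   i.e. the identity, so 1 is returned.  */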
3083
2b9112d6 3084/* Combine a shuffle with its arguments. Returns 1 if there were any
3085 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
496ec2ad 3086
3087static int
3088simplify_permutation (gimple_stmt_iterator *gsi)
3089{
3090 gimple stmt = gsi_stmt (*gsi);
3091 gimple def_stmt;
2b9112d6 3092 tree op0, op1, op2, op3, arg0, arg1;
3093 enum tree_code code;
ab54bbbd 3094 bool single_use_op0 = false;
496ec2ad 3095
2b9112d6 3096 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
496ec2ad 3097
3098 op0 = gimple_assign_rhs1 (stmt);
3099 op1 = gimple_assign_rhs2 (stmt);
3100 op2 = gimple_assign_rhs3 (stmt);
3101
496ec2ad 3102 if (TREE_CODE (op2) != VECTOR_CST)
3103 return 0;
3104
2b9112d6 3105 if (TREE_CODE (op0) == VECTOR_CST)
3106 {
3107 code = VECTOR_CST;
3108 arg0 = op0;
3109 }
3110 else if (TREE_CODE (op0) == SSA_NAME)
3111 {
ab54bbbd 3112 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3113 if (!def_stmt || !can_propagate_from (def_stmt))
2b9112d6 3114 return 0;
496ec2ad 3115
2b9112d6 3116 code = gimple_assign_rhs_code (def_stmt);
3117 arg0 = gimple_assign_rhs1 (def_stmt);
3118 }
3119 else
496ec2ad 3120 return 0;
3121
496ec2ad 3122 /* Two consecutive shuffles. */
2b9112d6 3123 if (code == VEC_PERM_EXPR)
496ec2ad 3124 {
3125 tree orig;
3126 int ident;
2b9112d6 3127
3128 if (op0 != op1)
3129 return 0;
496ec2ad 3130 op3 = gimple_assign_rhs3 (def_stmt);
3131 if (TREE_CODE (op3) != VECTOR_CST)
3132 return 0;
3133 ident = is_combined_permutation_identity (op3, op2);
3134 if (!ident)
3135 return 0;
3136 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3137 : gimple_assign_rhs2 (def_stmt);
3138 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3139 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3140 gimple_set_num_ops (stmt, 2);
3141 update_stmt (stmt);
3142 return remove_prop_source_from_use (op0) ? 2 : 1;
3143 }
3144
2b9112d6 3145 /* Shuffle of a constructor. */
3146 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3147 {
3148 tree opt;
3149 bool ret = false;
3150 if (op0 != op1)
3151 {
ab54bbbd 3152 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2b9112d6 3153 return 0;
3154
3155 if (TREE_CODE (op1) == VECTOR_CST)
3156 arg1 = op1;
3157 else if (TREE_CODE (op1) == SSA_NAME)
3158 {
3159 enum tree_code code2;
3160
ab54bbbd 3161 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3162 if (!def_stmt2 || !can_propagate_from (def_stmt2))
2b9112d6 3163 return 0;
3164
3165 code2 = gimple_assign_rhs_code (def_stmt2);
3166 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3167 return 0;
3168 arg1 = gimple_assign_rhs1 (def_stmt2);
3169 }
3170 else
3171 return 0;
3172 }
3173 else
3174 {
3175 /* Already used twice in this statement. */
3176 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3177 return 0;
3178 arg1 = arg0;
3179 }
9af5ce0c 3180 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
2b9112d6 3181 if (!opt
9af5ce0c 3182 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
2b9112d6 3183 return 0;
3184 gimple_assign_set_rhs_from_tree (gsi, opt);
3185 update_stmt (gsi_stmt (*gsi));
3186 if (TREE_CODE (op0) == SSA_NAME)
3187 ret = remove_prop_source_from_use (op0);
3188 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3189 ret |= remove_prop_source_from_use (op1);
3190 return ret ? 2 : 1;
3191 }
3192
3193 return 0;
496ec2ad 3194}
3195
6a9e13a2 3196/* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
3197
3198static bool
3199simplify_vector_constructor (gimple_stmt_iterator *gsi)
3200{
3201 gimple stmt = gsi_stmt (*gsi);
3202 gimple def_stmt;
3203 tree op, op2, orig, type, elem_type;
3204 unsigned elem_size, nelts, i;
3205 enum tree_code code;
3206 constructor_elt *elt;
3207 unsigned char *sel;
3208 bool maybe_ident;
3209
3210 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
3211
3212 op = gimple_assign_rhs1 (stmt);
3213 type = TREE_TYPE (op);
3214 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
3215
3216 nelts = TYPE_VECTOR_SUBPARTS (type);
3217 elem_type = TREE_TYPE (type);
3218 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3219
3220 sel = XALLOCAVEC (unsigned char, nelts);
3221 orig = NULL;
3222 maybe_ident = true;
f1f41a6c 3223 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
6a9e13a2 3224 {
3225 tree ref, op1;
3226
3227 if (i >= nelts)
3228 return false;
3229
3230 if (TREE_CODE (elt->value) != SSA_NAME)
3231 return false;
ab54bbbd 3232 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
3233 if (!def_stmt)
6a9e13a2 3234 return false;
3235 code = gimple_assign_rhs_code (def_stmt);
3236 if (code != BIT_FIELD_REF)
3237 return false;
3238 op1 = gimple_assign_rhs1 (def_stmt);
3239 ref = TREE_OPERAND (op1, 0);
3240 if (orig)
3241 {
3242 if (ref != orig)
3243 return false;
3244 }
3245 else
3246 {
3247 if (TREE_CODE (ref) != SSA_NAME)
3248 return false;
8a13ba5e 3249 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
3250 return false;
6a9e13a2 3251 orig = ref;
3252 }
3253 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
3254 return false;
3255 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
3256 if (sel[i] != i) maybe_ident = false;
3257 }
3258 if (i < nelts)
3259 return false;
3260
3261 if (maybe_ident)
d1938a4b 3262 gimple_assign_set_rhs_from_tree (gsi, orig);
6a9e13a2 3263 else
3264 {
d1938a4b 3265 tree mask_type, *mask_elts;
3266
3267 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
3268 return false;
3269 mask_type
3270 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3271 nelts);
3272 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3273 || GET_MODE_SIZE (TYPE_MODE (mask_type))
3274 != GET_MODE_SIZE (TYPE_MODE (type)))
6a9e13a2 3275 return false;
d1938a4b 3276 mask_elts = XALLOCAVEC (tree, nelts);
3277 for (i = 0; i < nelts; i++)
3278 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
3279 op2 = build_vector (mask_type, mask_elts);
6a9e13a2 3280 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
3281 }
3282 update_stmt (gsi_stmt (*gsi));
3283 return true;
3284}
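/* For example, if s_1 through s_4 are BIT_FIELD_REF extractions of the four
   elements of the same vector v_5 in their original order, then

     x_6 = { s_1, s_2, s_3, s_4 };

   becomes x_6 = v_5; a permuted order instead becomes a VEC_PERM_EXPR of
   v_5 with itself and a constant selector, provided the target supports
   that permutation.  */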
3285
678b2f5b 3286/* Main entry point for the forward propagation and statement combine
3287 optimizer. */
4ee9c684 3288
2a1990e9 3289static unsigned int
678b2f5b 3290ssa_forward_propagate_and_combine (void)
4ee9c684 3291{
f5c8cff5 3292 basic_block bb;
c96420f8 3293 unsigned int todoflags = 0;
4ee9c684 3294
148aa112 3295 cfg_changed = false;
3296
f5c8cff5 3297 FOR_EACH_BB (bb)
3298 {
2f5a3c4a 3299 gimple_stmt_iterator gsi;
291d763b 3300
678b2f5b 3301 /* Apply forward propagation to all stmts in the basic-block.
3302 Note we update GSI within the loop as necessary. */
75a70cf9 3303 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
291d763b 3304 {
75a70cf9 3305 gimple stmt = gsi_stmt (gsi);
678b2f5b 3306 tree lhs, rhs;
3307 enum tree_code code;
291d763b 3308
678b2f5b 3309 if (!is_gimple_assign (stmt))
291d763b 3310 {
678b2f5b 3311 gsi_next (&gsi);
3312 continue;
3313 }
3a938499 3314
678b2f5b 3315 lhs = gimple_assign_lhs (stmt);
3316 rhs = gimple_assign_rhs1 (stmt);
3317 code = gimple_assign_rhs_code (stmt);
3318 if (TREE_CODE (lhs) != SSA_NAME
3319 || has_zero_uses (lhs))
3320 {
3321 gsi_next (&gsi);
3322 continue;
3323 }
3a938499 3324
678b2f5b 3325 /* If this statement sets an SSA_NAME to an address,
3326 try to propagate the address into the uses of the SSA_NAME. */
3327 if (code == ADDR_EXPR
3328 /* Handle pointer conversions on invariant addresses
3329 as well, as this is valid gimple. */
3330 || (CONVERT_EXPR_CODE_P (code)
3331 && TREE_CODE (rhs) == ADDR_EXPR
3332 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3333 {
3334 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3335 if ((!base
3336 || !DECL_P (base)
3337 || decl_address_invariant_p (base))
3338 && !stmt_references_abnormal_ssa_name (stmt)
3339 && forward_propagate_addr_expr (lhs, rhs))
1c4607fd 3340 {
678b2f5b 3341 release_defs (stmt);
678b2f5b 3342 gsi_remove (&gsi, true);
1c4607fd 3343 }
678b2f5b 3344 else
3345 gsi_next (&gsi);
3346 }
cd22a796 3347 else if (code == POINTER_PLUS_EXPR)
678b2f5b 3348 {
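	      /* A constant-offset LHS = RHS p+ CST is handed to
		 forward_propagate_addr_expr as the equivalent address
		 &MEM[RHS + CST]; if that is not possible and RHS is
		 invariant, the addition is simply folded in place.  */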
cd22a796 3349 tree off = gimple_assign_rhs2 (stmt);
3350 if (TREE_CODE (off) == INTEGER_CST
3351 && can_propagate_from (stmt)
3352 && !simple_iv_increment_p (stmt)
678b2f5b 3353 /* ??? Better adjust the interface to that function
3354 instead of building new trees here. */
3355 && forward_propagate_addr_expr
cd22a796 3356 (lhs,
3357 build1_loc (gimple_location (stmt),
3358 ADDR_EXPR, TREE_TYPE (rhs),
3359 fold_build2 (MEM_REF,
3360 TREE_TYPE (TREE_TYPE (rhs)),
3361 rhs,
3362 fold_convert (ptr_type_node,
3363 off)))))
ca3c9092 3364 {
678b2f5b 3365 release_defs (stmt);
678b2f5b 3366 gsi_remove (&gsi, true);
ca3c9092 3367 }
678b2f5b 3368 else if (is_gimple_min_invariant (rhs))
6afd0544 3369 {
678b2f5b 3370 /* Make sure to fold &a[0] + off_1 here. */
50aacf4c 3371 fold_stmt_inplace (&gsi);
678b2f5b 3372 update_stmt (stmt);
3373 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6afd0544 3374 gsi_next (&gsi);
3375 }
291d763b 3376 else
75a70cf9 3377 gsi_next (&gsi);
291d763b 3378 }
678b2f5b 3379 else if (TREE_CODE_CLASS (code) == tcc_comparison)
b5860aba 3380 {
e3a19533 3381 if (forward_propagate_comparison (&gsi))
75200312 3382 cfg_changed = true;
b5860aba 3383 }
291d763b 3384 else
75a70cf9 3385 gsi_next (&gsi);
291d763b 3386 }
678b2f5b 3387
3388 /* Combine stmts with the stmts defining their operands.
3389 Note we update GSI within the loop as necessary. */
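      /* Every statement starts with GF_PLF_1 cleared, meaning it may still
	 need a revisit, and is marked done once processed without change.
	 After a successful combination the walk backs up over any
	 statements not yet marked done so they are reconsidered.  */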
a7107e58 3390 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
678b2f5b 3391 {
3392 gimple stmt = gsi_stmt (gsi);
3393 bool changed = false;
3394
2f5a3c4a 3395 /* Mark stmt as potentially needing revisiting. */
3396 gimple_set_plf (stmt, GF_PLF_1, false);
3397
678b2f5b 3398 switch (gimple_code (stmt))
3399 {
3400 case GIMPLE_ASSIGN:
3401 {
3402 tree rhs1 = gimple_assign_rhs1 (stmt);
3403 enum tree_code code = gimple_assign_rhs_code (stmt);
3404
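		/* ~X or -X where X is itself the result of the same
		   operation folds back to the original operand
		   (~~Y -> Y, -(-Y) -> Y).  */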
3405 if ((code == BIT_NOT_EXPR
3406 || code == NEGATE_EXPR)
3407 && TREE_CODE (rhs1) == SSA_NAME)
3408 changed = simplify_not_neg_expr (&gsi);
360b78f3 3409 else if (code == COND_EXPR
3410 || code == VEC_COND_EXPR)
678b2f5b 3411 {
3412 /* In this case the entire COND_EXPR is in rhs1. */
11b881f5 3413 if (forward_propagate_into_cond (&gsi)
3414 || combine_cond_exprs (&gsi))
3415 {
3416 changed = true;
3417 stmt = gsi_stmt (gsi);
3418 }
678b2f5b 3419 }
3420 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3421 {
6f9714b3 3422 int did_something;
6f9714b3 3423 did_something = forward_propagate_into_comparison (&gsi);
3424 if (did_something == 2)
3425 cfg_changed = true;
6f9714b3 3426 changed = did_something != 0;
678b2f5b 3427 }
3b8827a2 3428 else if ((code == PLUS_EXPR
3429 || code == BIT_IOR_EXPR
3430 || code == BIT_XOR_EXPR)
3431 && simplify_rotate (&gsi))
3432 changed = true;
678b2f5b 3433 else if (code == BIT_AND_EXPR
3434 || code == BIT_IOR_EXPR
3435 || code == BIT_XOR_EXPR)
3436 changed = simplify_bitwise_binary (&gsi);
3437 else if (code == PLUS_EXPR
3438 || code == MINUS_EXPR)
50aacf4c 3439 changed = associate_plusminus (&gsi);
c9c17332 3440 else if (code == POINTER_PLUS_EXPR)
3441 changed = associate_pointerplus (&gsi);
678b2f5b 3442 else if (CONVERT_EXPR_CODE_P (code)
3443 || code == FLOAT_EXPR
3444 || code == FIX_TRUNC_EXPR)
89c8f35a 3445 {
3446 int did_something = combine_conversions (&gsi);
3447 if (did_something == 2)
3448 cfg_changed = true;
d23e1965 3449
3450 /* If we have a narrowing conversion to an integral
3451 type that is fed by a BIT_AND_EXPR, we might be
3452 able to remove the BIT_AND_EXPR if it merely
3453 masks off bits outside the final type (and nothing
3454 		   else).  */
3455 if (! did_something)
3456 {
3457 tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
3458 tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
3459 if (INTEGRAL_TYPE_P (outer_type)
3460 && INTEGRAL_TYPE_P (inner_type)
3461 && (TYPE_PRECISION (outer_type)
3462 <= TYPE_PRECISION (inner_type)))
3463 did_something = simplify_conversion_from_bitmask (&gsi);
3464 }
3465
89c8f35a 3466 changed = did_something != 0;
3467 }
496ec2ad 3468 else if (code == VEC_PERM_EXPR)
3469 {
3470 int did_something = simplify_permutation (&gsi);
3471 if (did_something == 2)
3472 cfg_changed = true;
3473 changed = did_something != 0;
3474 }
173c91d9 3475 else if (code == BIT_FIELD_REF)
3476 changed = simplify_bitfield_ref (&gsi);
6a9e13a2 3477 else if (code == CONSTRUCTOR
3478 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3479 changed = simplify_vector_constructor (&gsi);
678b2f5b 3480 break;
3481 }
3482
3483 case GIMPLE_SWITCH:
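	  /* simplify_gimple_switch strips a conversion from the switch
	     index when every case label fits in the unconverted type.  */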
3484 changed = simplify_gimple_switch (stmt);
3485 break;
3486
3487 case GIMPLE_COND:
3488 {
3489 int did_something;
678b2f5b 3490 did_something = forward_propagate_into_gimple_cond (stmt);
3491 if (did_something == 2)
3492 cfg_changed = true;
678b2f5b 3493 changed = did_something != 0;
3494 break;
3495 }
3496
3497 case GIMPLE_CALL:
3498 {
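	    /* Calls to normal built-in functions are passed to
	       simplify_builtin_call, which knows how to fold or combine
	       certain memory and string built-ins.  */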
3499 tree callee = gimple_call_fndecl (stmt);
3500 if (callee != NULL_TREE
3501 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3502 changed = simplify_builtin_call (&gsi, callee);
3503 break;
3504 }
3505
3506 default:;
3507 }
3508
a7107e58 3509 if (changed)
3510 {
3511 /* If the stmt changed then re-visit it and the statements
3512 inserted before it. */
2f5a3c4a 3513 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3514 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3515 break;
3516 if (gsi_end_p (gsi))
a7107e58 3517 gsi = gsi_start_bb (bb);
3518 else
2f5a3c4a 3519 gsi_next (&gsi);
a7107e58 3520 }
3521 else
3522 {
2f5a3c4a 3523 /* Stmt no longer needs to be revisited. */
3524 gimple_set_plf (stmt, GF_PLF_1, true);
a7107e58 3525 gsi_next (&gsi);
3526 }
678b2f5b 3527 }
f5c8cff5 3528 }
148aa112 3529
3530 if (cfg_changed)
6fa78c7b 3531 todoflags |= TODO_cleanup_cfg;
678b2f5b 3532
c96420f8 3533 return todoflags;
4ee9c684 3534}
3535
3536
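/* The pass is gated on -ftree-forwprop.  */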
3537static bool
3538gate_forwprop (void)
3539{
408c3c77 3540 return flag_tree_forwprop;
4ee9c684 3541}
3542
cbe8bda8 3543namespace {
3544
3545const pass_data pass_data_forwprop =
20099e35 3546{
cbe8bda8 3547 GIMPLE_PASS, /* type */
3548 "forwprop", /* name */
3549 OPTGROUP_NONE, /* optinfo_flags */
3550 true, /* has_gate */
3551 true, /* has_execute */
3552 TV_TREE_FORWPROP, /* tv_id */
3553 ( PROP_cfg | PROP_ssa ), /* properties_required */
3554 0, /* properties_provided */
3555 0, /* properties_destroyed */
3556 0, /* todo_flags_start */
3557 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
4ee9c684 3558};
cbe8bda8 3559
3560class pass_forwprop : public gimple_opt_pass
3561{
3562public:
9af5ce0c 3563 pass_forwprop (gcc::context *ctxt)
3564 : gimple_opt_pass (pass_data_forwprop, ctxt)
cbe8bda8 3565 {}
3566
3567 /* opt_pass methods: */
ae84f584 3568 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
cbe8bda8 3569 bool gate () { return gate_forwprop (); }
3570 unsigned int execute () { return ssa_forward_propagate_and_combine (); }
3571
3572}; // class pass_forwprop
3573
3574} // anon namespace
3575
3576gimple_opt_pass *
3577make_pass_forwprop (gcc::context *ctxt)
3578{
3579 return new pass_forwprop (ctxt);
3580}