]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-forwprop.c
Daily bump.
[thirdparty/gcc.git] / gcc / tree-ssa-forwprop.c
CommitLineData
291d763b 1/* Forward propagation of expressions for single use variables.
711789cc 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4ee9c684 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
4ee9c684 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
4ee9c684 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
4ee9c684 24#include "tree.h"
4ee9c684 25#include "tm_p.h"
26#include "basic-block.h"
e5b1e080 27#include "gimple-pretty-print.h"
69ee5dbb 28#include "tree-ssa.h"
4ee9c684 29#include "tree-pass.h"
291d763b 30#include "langhooks.h"
5adc1066 31#include "flags.h"
75a70cf9 32#include "gimple.h"
27f931ff 33#include "expr.h"
6b42039a 34#include "cfgloop.h"
d1938a4b 35#include "optabs.h"
58bf5219 36#include "tree-ssa-propagate.h"
4ee9c684 37
291d763b 38/* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
8f628ee8 40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
4ee9c684 42
291d763b 43 One class of common cases we handle is forward propagating a single use
48e1416a 44 variable into a COND_EXPR.
4ee9c684 45
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
49
50 Will be transformed into:
51
52 bb0:
53 if (a COND b) goto ... else goto ...
48e1416a 54
4ee9c684 55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56
57 Or (assuming c1 and c2 are constants):
58
59 bb0:
48e1416a 60 x = a + c1;
4ee9c684 61 if (x EQ/NEQ c2) goto ... else goto ...
62
63 Will be transformed into:
64
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67
68 Similarly for x = a - c1.
48e1416a 69
4ee9c684 70 Or
71
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
75
76 Will be transformed into:
77
78 bb0:
79 if (a == 0) goto ... else goto ...
80
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
84
f5c8cff5 85 Or
86
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
90
91 Will be transformed into:
92
93 bb0:
94 if (a != 0) goto ... else goto ...
95
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
98
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
102
4ee9c684 103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
e6dfde59 105 adding insane complexity in the dominator optimizer.
4ee9c684 106
f5c8cff5 107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
111
291d763b 112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
114
115 ptr = &x->y->z;
116 res = *ptr;
117
118 Will get turned into
119
120 res = x->y->z;
121
50f39ec6 122 Or
123 ptr = (type1*)&type2var;
124 res = *ptr
125
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
129
291d763b 130 Or
131
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
134
135 Will get turned into
136
137 ptr2 = &x[constant/elementsize];
138
139 Or
140
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
145
146 Will get turned into:
147
148 ptr2 = &x[index];
149
1c4607fd 150 Or
151 ssa = (int) decl
152 res = ssa & 1
153
154 Provided that decl has known alignment >= 2, will get turned into
155
156 res = 0
157
8f628ee8 158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 {NOT_EXPR,NEG_EXPR}.
291d763b 161
4ee9c684 162 This will (of course) be extended as other needs arise. */
163
15ec875c 164static bool forward_propagate_addr_expr (tree name, tree rhs);
148aa112 165
b59e1c90 166/* Set to true if we delete dead edges during the optimization. */
148aa112 167static bool cfg_changed;
168
75a70cf9 169static tree rhs_to_tree (tree type, gimple stmt);
148aa112 170
83a20baf 171/* Get the next statement we can propagate NAME's value into skipping
5adc1066 172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
a3451973 176
75a70cf9 177static gimple
5adc1066 178get_prop_dest_stmt (tree name, tree *final_name_p)
a3451973 179{
5adc1066 180 use_operand_p use;
75a70cf9 181 gimple use_stmt;
a3451973 182
5adc1066 183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
75a70cf9 186 return NULL;
a3451973 187
5adc1066 188 /* If this is not a trivial copy, we found it. */
8f0b877f 189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
75a70cf9 190 || gimple_assign_rhs1 (use_stmt) != name)
5adc1066 191 break;
192
193 /* Continue searching uses of the copy destination. */
75a70cf9 194 name = gimple_assign_lhs (use_stmt);
5adc1066 195 } while (1);
196
197 if (final_name_p)
198 *final_name_p = name;
199
200 return use_stmt;
a3451973 201}
202
5adc1066 203/* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
4ee9c684 210
75a70cf9 211static gimple
5adc1066 212get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
f5c8cff5 213{
5adc1066 214 bool single_use = true;
215
216 do {
75a70cf9 217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
5adc1066 218
219 if (!has_single_use (name))
220 {
221 single_use = false;
222 if (single_use_only)
75a70cf9 223 return NULL;
5adc1066 224 }
225
226 /* If name is defined by a PHI node or is the default def, bail out. */
8f0b877f 227 if (!is_gimple_assign (def_stmt))
75a70cf9 228 return NULL;
5adc1066 229
ab31ca23 230 /* If def_stmt is a simple copy, continue looking. */
231 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
232 name = gimple_assign_rhs1 (def_stmt);
233 else
5adc1066 234 {
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
237
ab31ca23 238 return def_stmt;
5adc1066 239 }
5adc1066 240 } while (1);
241}
e6dfde59 242
5adc1066 243/* Checks if the destination ssa name in DEF_STMT can be used as
244 propagation source. Returns true if so, otherwise false. */
e6dfde59 245
5adc1066 246static bool
75a70cf9 247can_propagate_from (gimple def_stmt)
5adc1066 248{
75a70cf9 249 gcc_assert (is_gimple_assign (def_stmt));
8f0b877f 250
484b827b 251 /* If the rhs has side-effects we cannot propagate from it. */
75a70cf9 252 if (gimple_has_volatile_ops (def_stmt))
484b827b 253 return false;
254
255 /* If the rhs is a load we cannot propagate from it. */
75a70cf9 256 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
257 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
484b827b 258 return false;
259
b9e98b8a 260 /* Constants can be always propagated. */
8f0b877f 261 if (gimple_assign_single_p (def_stmt)
262 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
b9e98b8a 263 return true;
264
75a70cf9 265 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
32cdcc42 266 if (stmt_references_abnormal_ssa_name (def_stmt))
267 return false;
4ee9c684 268
5adc1066 269 /* If the definition is a conversion of a pointer to a function type,
75a70cf9 270 then we can not apply optimizations as some targets require
271 function pointers to be canonicalized and in this case this
272 optimization could eliminate a necessary canonicalization. */
8f0b877f 273 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
75a70cf9 274 {
275 tree rhs = gimple_assign_rhs1 (def_stmt);
276 if (POINTER_TYPE_P (TREE_TYPE (rhs))
277 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
278 return false;
279 }
8f0b877f 280
5adc1066 281 return true;
e6dfde59 282}
283
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    /* Stop if NAME was already released, is a default definition, or
       still has remaining uses -- in all those cases its defining
       statement must not be removed.  */
    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    /* PHI nodes and statements with side effects are kept even when
       their result is unused.  */
    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    /* Unlink the statement's virtual operands and remove it; removing
       a statement that could throw may leave dead EH edges, which
       requires a later cfg-cleanup run.  */
    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    /* Continue with the first operand of the removed assignment; it
       may now have become dead as well.  */
    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
8f628ee8 323
75a70cf9 324/* Return the rhs of a gimple_assign STMT in a form of a single tree,
325 converted to type TYPE.
48e1416a 326
75a70cf9 327 This should disappear, but is needed so we can combine expressions and use
328 the fold() interfaces. Long term, we need to develop folding and combine
329 routines that deal with gimple exclusively . */
330
331static tree
332rhs_to_tree (tree type, gimple stmt)
333{
389dd41b 334 location_t loc = gimple_location (stmt);
75a70cf9 335 enum tree_code code = gimple_assign_rhs_code (stmt);
57c45d70 336 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
337 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
338 gimple_assign_rhs2 (stmt),
339 gimple_assign_rhs3 (stmt));
340 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
389dd41b 341 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
fb8ed03f 342 gimple_assign_rhs2 (stmt));
75a70cf9 343 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
fb8ed03f 344 return build1 (code, type, gimple_assign_rhs1 (stmt));
75a70cf9 345 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
346 return gimple_assign_rhs1 (stmt);
347 else
348 gcc_unreachable ();
349}
350
5adc1066 351/* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
352 the folded result in a form suitable for COND_EXPR_COND or
353 NULL_TREE, if there is no suitable simplified form. If
354 INVARIANT_ONLY is true only gimple_min_invariant results are
355 considered simplified. */
8f628ee8 356
357static tree
c73fee76 358combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
5adc1066 359 tree op0, tree op1, bool invariant_only)
8f628ee8 360{
5adc1066 361 tree t;
8f628ee8 362
5adc1066 363 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
8f628ee8 364
c73fee76 365 fold_defer_overflow_warnings ();
366 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
5adc1066 367 if (!t)
c73fee76 368 {
369 fold_undefer_overflow_warnings (false, NULL, 0);
370 return NULL_TREE;
371 }
8f628ee8 372
5adc1066 373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
8f628ee8 375
a7392604 376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
8f628ee8 378
5adc1066 379 /* Bail out if we required an invariant but didn't get one. */
75a70cf9 380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
c73fee76 381 {
382 fold_undefer_overflow_warnings (false, NULL, 0);
383 return NULL_TREE;
384 }
385
386 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
8f628ee8 387
a7392604 388 return t;
8f628ee8 389}
390
c8126d25 391/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
392 of its operand. Return a new comparison tree or NULL_TREE if there
393 were no simplifying combines. */
394
395static tree
c73fee76 396forward_propagate_into_comparison_1 (gimple stmt,
678b2f5b 397 enum tree_code code, tree type,
398 tree op0, tree op1)
c8126d25 399{
400 tree tmp = NULL_TREE;
401 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
402 bool single_use0_p = false, single_use1_p = false;
403
404 /* For comparisons use the first operand, that is likely to
405 simplify comparisons against constants. */
406 if (TREE_CODE (op0) == SSA_NAME)
407 {
408 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
409 if (def_stmt && can_propagate_from (def_stmt))
410 {
411 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
c73fee76 412 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 413 rhs0, op1, !single_use0_p);
414 if (tmp)
415 return tmp;
416 }
417 }
418
419 /* If that wasn't successful, try the second operand. */
420 if (TREE_CODE (op1) == SSA_NAME)
421 {
422 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
423 if (def_stmt && can_propagate_from (def_stmt))
424 {
425 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
c73fee76 426 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 427 op0, rhs1, !single_use1_p);
428 if (tmp)
429 return tmp;
430 }
431 }
432
433 /* If that wasn't successful either, try both operands. */
434 if (rhs0 != NULL_TREE
435 && rhs1 != NULL_TREE)
c73fee76 436 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 437 rhs0, rhs1,
438 !(single_use0_p && single_use1_p));
439
440 return tmp;
441}
442
678b2f5b 443/* Propagate from the ssa name definition statements of the assignment
444 from a comparison at *GSI into the conditional if that simplifies it.
6f9714b3 445 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
446 otherwise returns 0. */
c8126d25 447
6f9714b3 448static int
678b2f5b 449forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
c8126d25 450{
678b2f5b 451 gimple stmt = gsi_stmt (*gsi);
452 tree tmp;
6f9714b3 453 bool cfg_changed = false;
56632de0 454 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
6f9714b3 455 tree rhs1 = gimple_assign_rhs1 (stmt);
456 tree rhs2 = gimple_assign_rhs2 (stmt);
c8126d25 457
458 /* Combine the comparison with defining statements. */
c73fee76 459 tmp = forward_propagate_into_comparison_1 (stmt,
678b2f5b 460 gimple_assign_rhs_code (stmt),
56632de0 461 type, rhs1, rhs2);
462 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
c8126d25 463 {
678b2f5b 464 gimple_assign_set_rhs_from_tree (gsi, tmp);
50aacf4c 465 fold_stmt (gsi);
466 update_stmt (gsi_stmt (*gsi));
75200312 467
6f9714b3 468 if (TREE_CODE (rhs1) == SSA_NAME)
469 cfg_changed |= remove_prop_source_from_use (rhs1);
470 if (TREE_CODE (rhs2) == SSA_NAME)
471 cfg_changed |= remove_prop_source_from_use (rhs2);
472 return cfg_changed ? 2 : 1;
c8126d25 473 }
474
6f9714b3 475 return 0;
c8126d25 476}
477
5adc1066 478/* Propagate from the ssa name definition statements of COND_EXPR
75a70cf9 479 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
480 Returns zero if no statement was changed, one if there were
481 changes and two if cfg_cleanup needs to run.
48e1416a 482
75a70cf9 483 This must be kept in sync with forward_propagate_into_cond. */
484
485static int
486forward_propagate_into_gimple_cond (gimple stmt)
487{
678b2f5b 488 tree tmp;
489 enum tree_code code = gimple_cond_code (stmt);
6f9714b3 490 bool cfg_changed = false;
491 tree rhs1 = gimple_cond_lhs (stmt);
492 tree rhs2 = gimple_cond_rhs (stmt);
678b2f5b 493
494 /* We can do tree combining on SSA_NAME and comparison expressions. */
495 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
496 return 0;
497
c73fee76 498 tmp = forward_propagate_into_comparison_1 (stmt, code,
678b2f5b 499 boolean_type_node,
6f9714b3 500 rhs1, rhs2);
678b2f5b 501 if (tmp)
502 {
503 if (dump_file && tmp)
504 {
678b2f5b 505 fprintf (dump_file, " Replaced '");
6f9714b3 506 print_gimple_expr (dump_file, stmt, 0, 0);
678b2f5b 507 fprintf (dump_file, "' with '");
508 print_generic_expr (dump_file, tmp, 0);
509 fprintf (dump_file, "'\n");
510 }
75a70cf9 511
678b2f5b 512 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
513 update_stmt (stmt);
75a70cf9 514
6f9714b3 515 if (TREE_CODE (rhs1) == SSA_NAME)
516 cfg_changed |= remove_prop_source_from_use (rhs1);
517 if (TREE_CODE (rhs2) == SSA_NAME)
518 cfg_changed |= remove_prop_source_from_use (rhs2);
519 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
678b2f5b 520 }
75a70cf9 521
10a6edd6 522 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
523 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
524 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
525 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
526 && ((code == EQ_EXPR
527 && integer_zerop (rhs2))
528 || (code == NE_EXPR
529 && integer_onep (rhs2))))
530 {
531 basic_block bb = gimple_bb (stmt);
532 gimple_cond_set_code (stmt, NE_EXPR);
533 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
534 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
535 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
536 return 1;
537 }
538
6f9714b3 539 return 0;
75a70cf9 540}
541
542
543/* Propagate from the ssa name definition statements of COND_EXPR
544 in the rhs of statement STMT into the conditional if that simplifies it.
8a2caf10 545 Returns true zero if the stmt was changed. */
4ee9c684 546
8a2caf10 547static bool
75a70cf9 548forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
e6dfde59 549{
75a70cf9 550 gimple stmt = gsi_stmt (*gsi_p);
678b2f5b 551 tree tmp = NULL_TREE;
552 tree cond = gimple_assign_rhs1 (stmt);
def3cb70 553 enum tree_code code = gimple_assign_rhs_code (stmt);
10a6edd6 554 bool swap = false;
d080be9e 555
678b2f5b 556 /* We can do tree combining on SSA_NAME and comparison expressions. */
557 if (COMPARISON_CLASS_P (cond))
c73fee76 558 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
f2c1848b 559 TREE_TYPE (cond),
c8126d25 560 TREE_OPERAND (cond, 0),
561 TREE_OPERAND (cond, 1));
678b2f5b 562 else if (TREE_CODE (cond) == SSA_NAME)
563 {
def3cb70 564 enum tree_code def_code;
8a2caf10 565 tree name = cond;
678b2f5b 566 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
567 if (!def_stmt || !can_propagate_from (def_stmt))
6f9714b3 568 return 0;
5adc1066 569
def3cb70 570 def_code = gimple_assign_rhs_code (def_stmt);
571 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
8a2caf10 572 tmp = fold_build2_loc (gimple_location (def_stmt),
def3cb70 573 def_code,
bc112f18 574 TREE_TYPE (cond),
8a2caf10 575 gimple_assign_rhs1 (def_stmt),
576 gimple_assign_rhs2 (def_stmt));
def3cb70 577 else if (code == COND_EXPR
578 && ((def_code == BIT_NOT_EXPR
579 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
580 || (def_code == BIT_XOR_EXPR
581 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
10a6edd6 582 {
583 tmp = gimple_assign_rhs1 (def_stmt);
584 swap = true;
585 }
678b2f5b 586 }
5adc1066 587
25f48be0 588 if (tmp
589 && is_gimple_condexpr (tmp))
678b2f5b 590 {
591 if (dump_file && tmp)
592 {
593 fprintf (dump_file, " Replaced '");
594 print_generic_expr (dump_file, cond, 0);
595 fprintf (dump_file, "' with '");
596 print_generic_expr (dump_file, tmp, 0);
597 fprintf (dump_file, "'\n");
598 }
d080be9e 599
def3cb70 600 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
601 : integer_onep (tmp))
8a2caf10 602 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
603 else if (integer_zerop (tmp))
604 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
605 else
10a6edd6 606 {
607 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
608 if (swap)
609 {
610 tree t = gimple_assign_rhs2 (stmt);
611 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
612 gimple_assign_set_rhs3 (stmt, t);
613 }
614 }
678b2f5b 615 stmt = gsi_stmt (*gsi_p);
616 update_stmt (stmt);
5adc1066 617
8a2caf10 618 return true;
678b2f5b 619 }
d080be9e 620
6f9714b3 621 return 0;
4ee9c684 622}
623
360b78f3 624/* Propagate from the ssa name definition statements of COND_EXPR
625 values in the rhs of statement STMT into the conditional arms
626 if that simplifies it.
627 Returns true if the stmt was changed. */
628
629static bool
630combine_cond_exprs (gimple_stmt_iterator *gsi_p)
631{
632 gimple stmt = gsi_stmt (*gsi_p);
633 tree cond, val1, val2;
634 bool changed = false;
635
636 cond = gimple_assign_rhs1 (stmt);
637 val1 = gimple_assign_rhs2 (stmt);
638 if (TREE_CODE (val1) == SSA_NAME)
639 {
640 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
641 if (is_gimple_assign (def_stmt)
642 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
643 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
644 {
645 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
646 gimple_assign_set_rhs2 (stmt, val1);
647 changed = true;
648 }
649 }
650 val2 = gimple_assign_rhs3 (stmt);
651 if (TREE_CODE (val2) == SSA_NAME)
652 {
653 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
654 if (is_gimple_assign (def_stmt)
655 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
656 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
657 {
658 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
659 gimple_assign_set_rhs3 (stmt, val2);
660 changed = true;
661 }
662 }
663 if (operand_equal_p (val1, val2, 0))
664 {
665 gimple_assign_set_rhs_from_tree (gsi_p, val1);
666 stmt = gsi_stmt (*gsi_p);
667 changed = true;
668 }
669
670 if (changed)
671 update_stmt (stmt);
672
673 return changed;
674}
675
48e1416a 676/* We've just substituted an ADDR_EXPR into stmt. Update all the
148aa112 677 relevant data structures to match. */
678
679static void
75a70cf9 680tidy_after_forward_propagate_addr (gimple stmt)
148aa112 681{
148aa112 682 /* We may have turned a trapping insn into a non-trapping insn. */
683 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
75a70cf9 684 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
148aa112 685 cfg_changed = true;
f2fae51f 686
75a70cf9 687 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
688 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
148aa112 689}
690
15ec875c 691/* NAME is a SSA_NAME representing DEF_RHS which is of the form
692 ADDR_EXPR <whatever>.
291d763b 693
3d5cfe81 694 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
291d763b 695 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
3d5cfe81 696 node or for recovery of array indexing from pointer arithmetic.
75a70cf9 697
6b5a5c42 698 Return true if the propagation was successful (the propagation can
699 be not totally successful, yet things may have been changed). */
291d763b 700
701static bool
75a70cf9 702forward_propagate_addr_expr_1 (tree name, tree def_rhs,
703 gimple_stmt_iterator *use_stmt_gsi,
6776dec8 704 bool single_use_p)
291d763b 705{
75a70cf9 706 tree lhs, rhs, rhs2, array_ref;
75a70cf9 707 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
708 enum tree_code rhs_code;
9e019299 709 bool res = true;
291d763b 710
971c637a 711 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
291d763b 712
75a70cf9 713 lhs = gimple_assign_lhs (use_stmt);
714 rhs_code = gimple_assign_rhs_code (use_stmt);
715 rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 716
6776dec8 717 /* Trivial cases. The use statement could be a trivial copy or a
15ec875c 718 useless conversion. Recurse to the uses of the lhs as copyprop does
971c637a 719 not copy through different variant pointers and FRE does not catch
6776dec8 720 all useless conversions. Treat the case of a single-use name and
721 a conversion to def_rhs type separate, though. */
971c637a 722 if (TREE_CODE (lhs) == SSA_NAME
75a70cf9 723 && ((rhs_code == SSA_NAME && rhs == name)
316616c9 724 || CONVERT_EXPR_CODE_P (rhs_code)))
6776dec8 725 {
316616c9 726 /* Only recurse if we don't deal with a single use or we cannot
727 do the propagation to the current statement. In particular
728 we can end up with a conversion needed for a non-invariant
729 address which we cannot do in a single statement. */
730 if (!single_use_p
731 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
bd8d8d81 732 && (!is_gimple_min_invariant (def_rhs)
733 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
734 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
735 && (TYPE_PRECISION (TREE_TYPE (lhs))
736 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
971c637a 737 return forward_propagate_addr_expr (lhs, def_rhs);
738
75a70cf9 739 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
316616c9 740 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
741 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
742 else
743 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
6776dec8 744 return true;
745 }
971c637a 746
182cf5a9 747 /* Propagate through constant pointer adjustments. */
748 if (TREE_CODE (lhs) == SSA_NAME
749 && rhs_code == POINTER_PLUS_EXPR
750 && rhs == name
751 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
752 {
753 tree new_def_rhs;
754 /* As we come here with non-invariant addresses in def_rhs we need
755 to make sure we can build a valid constant offsetted address
756 for further propagation. Simply rely on fold building that
757 and check after the fact. */
758 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
759 def_rhs,
760 fold_convert (ptr_type_node,
761 gimple_assign_rhs2 (use_stmt)));
762 if (TREE_CODE (new_def_rhs) == MEM_REF
f5d03f27 763 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
182cf5a9 764 return false;
765 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
766 TREE_TYPE (rhs));
767
768 /* Recurse. If we could propagate into all uses of lhs do not
769 bother to replace into the current use but just pretend we did. */
770 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
771 && forward_propagate_addr_expr (lhs, new_def_rhs))
772 return true;
773
774 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
775 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
776 new_def_rhs, NULL_TREE);
777 else if (is_gimple_min_invariant (new_def_rhs))
778 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
779 new_def_rhs, NULL_TREE);
780 else
781 return false;
782 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
783 update_stmt (use_stmt);
784 return true;
785 }
786
48e1416a 787 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
971c637a 788 ADDR_EXPR will not appear on the LHS. */
d0d1ecb8 789 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
790 while (handled_component_p (*lhsp))
791 lhsp = &TREE_OPERAND (*lhsp, 0);
792 lhs = *lhsp;
971c637a 793
182cf5a9 794 /* Now see if the LHS node is a MEM_REF using NAME. If so,
971c637a 795 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 796 if (TREE_CODE (lhs) == MEM_REF
9e019299 797 && TREE_OPERAND (lhs, 0) == name)
971c637a 798 {
182cf5a9 799 tree def_rhs_base;
800 HOST_WIDE_INT def_rhs_offset;
801 /* If the address is invariant we can always fold it. */
802 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
803 &def_rhs_offset)))
9e019299 804 {
182cf5a9 805 double_int off = mem_ref_offset (lhs);
806 tree new_ptr;
cf8f0e63 807 off += double_int::from_shwi (def_rhs_offset);
182cf5a9 808 if (TREE_CODE (def_rhs_base) == MEM_REF)
809 {
cf8f0e63 810 off += mem_ref_offset (def_rhs_base);
182cf5a9 811 new_ptr = TREE_OPERAND (def_rhs_base, 0);
812 }
813 else
814 new_ptr = build_fold_addr_expr (def_rhs_base);
815 TREE_OPERAND (lhs, 0) = new_ptr;
816 TREE_OPERAND (lhs, 1)
817 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
9e019299 818 tidy_after_forward_propagate_addr (use_stmt);
9e019299 819 /* Continue propagating into the RHS if this was not the only use. */
820 if (single_use_p)
821 return true;
822 }
182cf5a9 823 /* If the LHS is a plain dereference and the value type is the same as
824 that of the pointed-to type of the address we can put the
825 dereferenced address on the LHS preserving the original alias-type. */
d0d1ecb8 826 else if (integer_zerop (TREE_OPERAND (lhs, 1))
827 && ((gimple_assign_lhs (use_stmt) == lhs
828 && useless_type_conversion_p
829 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
830 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
831 || types_compatible_p (TREE_TYPE (lhs),
832 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
f6e2e4ff 833 /* Don't forward anything into clobber stmts if it would result
834 in the lhs no longer being a MEM_REF. */
835 && (!gimple_clobber_p (use_stmt)
836 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
182cf5a9 837 {
838 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 839 tree new_offset, new_base, saved, new_lhs;
182cf5a9 840 while (handled_component_p (*def_rhs_basep))
841 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
842 saved = *def_rhs_basep;
843 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
844 {
845 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 846 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
847 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 848 }
849 else
850 {
851 new_base = build_fold_addr_expr (*def_rhs_basep);
852 new_offset = TREE_OPERAND (lhs, 1);
853 }
854 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
855 new_base, new_offset);
2e5dc41c 856 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 857 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
2e5dc41c 858 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
98d96c6f 859 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 860 *lhsp = new_lhs;
98d96c6f 861 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 862 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
182cf5a9 863 *def_rhs_basep = saved;
864 tidy_after_forward_propagate_addr (use_stmt);
865 /* Continue propagating into the RHS if this was not the
866 only use. */
867 if (single_use_p)
868 return true;
869 }
9e019299 870 else
871 /* We can have a struct assignment dereferencing our name twice.
872 Note that we didn't propagate into the lhs to not falsely
873 claim we did when propagating into the rhs. */
874 res = false;
971c637a 875 }
15ec875c 876
631d5db6 877 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
878 nodes from the RHS. */
d0d1ecb8 879 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
880 if (TREE_CODE (*rhsp) == ADDR_EXPR)
881 rhsp = &TREE_OPERAND (*rhsp, 0);
882 while (handled_component_p (*rhsp))
883 rhsp = &TREE_OPERAND (*rhsp, 0);
884 rhs = *rhsp;
291d763b 885
182cf5a9 886 /* Now see if the RHS node is a MEM_REF using NAME. If so,
291d763b 887 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 888 if (TREE_CODE (rhs) == MEM_REF
889 && TREE_OPERAND (rhs, 0) == name)
291d763b 890 {
182cf5a9 891 tree def_rhs_base;
892 HOST_WIDE_INT def_rhs_offset;
893 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
894 &def_rhs_offset)))
895 {
896 double_int off = mem_ref_offset (rhs);
897 tree new_ptr;
cf8f0e63 898 off += double_int::from_shwi (def_rhs_offset);
182cf5a9 899 if (TREE_CODE (def_rhs_base) == MEM_REF)
900 {
cf8f0e63 901 off += mem_ref_offset (def_rhs_base);
182cf5a9 902 new_ptr = TREE_OPERAND (def_rhs_base, 0);
903 }
904 else
905 new_ptr = build_fold_addr_expr (def_rhs_base);
906 TREE_OPERAND (rhs, 0) = new_ptr;
907 TREE_OPERAND (rhs, 1)
908 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
50aacf4c 909 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 910 tidy_after_forward_propagate_addr (use_stmt);
911 return res;
912 }
2e5dc41c 913 /* If the RHS is a plain dereference and the value type is the same as
182cf5a9 914 that of the pointed-to type of the address we can put the
2e5dc41c 915 dereferenced address on the RHS preserving the original alias-type. */
d0d1ecb8 916 else if (integer_zerop (TREE_OPERAND (rhs, 1))
917 && ((gimple_assign_rhs1 (use_stmt) == rhs
918 && useless_type_conversion_p
919 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
920 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
921 || types_compatible_p (TREE_TYPE (rhs),
922 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
182cf5a9 923 {
924 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 925 tree new_offset, new_base, saved, new_rhs;
182cf5a9 926 while (handled_component_p (*def_rhs_basep))
927 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
928 saved = *def_rhs_basep;
929 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
930 {
931 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 932 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
933 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 934 }
935 else
936 {
937 new_base = build_fold_addr_expr (*def_rhs_basep);
938 new_offset = TREE_OPERAND (rhs, 1);
939 }
940 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
941 new_base, new_offset);
2e5dc41c 942 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 943 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
2e5dc41c 944 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
98d96c6f 945 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
d0d1ecb8 946 *rhsp = new_rhs;
98d96c6f 947 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 948 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
182cf5a9 949 *def_rhs_basep = saved;
50aacf4c 950 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 951 tidy_after_forward_propagate_addr (use_stmt);
952 return res;
953 }
291d763b 954 }
955
971c637a 956 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
957 is nothing to do. */
75a70cf9 958 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
959 || gimple_assign_rhs1 (use_stmt) != name)
971c637a 960 return false;
961
291d763b 962 /* The remaining cases are all for turning pointer arithmetic into
963 array indexing. They only apply when we have the address of
964 element zero in an array. If that is not the case then there
965 is nothing to do. */
15ec875c 966 array_ref = TREE_OPERAND (def_rhs, 0);
182cf5a9 967 if ((TREE_CODE (array_ref) != ARRAY_REF
968 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
969 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
970 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
291d763b 971 return false;
972
75a70cf9 973 rhs2 = gimple_assign_rhs2 (use_stmt);
704d7315 974 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
75a70cf9 975 if (TREE_CODE (rhs2) == INTEGER_CST)
291d763b 976 {
704d7315 977 tree new_rhs = build1_loc (gimple_location (use_stmt),
978 ADDR_EXPR, TREE_TYPE (def_rhs),
979 fold_build2 (MEM_REF,
980 TREE_TYPE (TREE_TYPE (def_rhs)),
981 unshare_expr (def_rhs),
982 fold_convert (ptr_type_node,
983 rhs2)));
984 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
985 use_stmt = gsi_stmt (*use_stmt_gsi);
986 update_stmt (use_stmt);
987 tidy_after_forward_propagate_addr (use_stmt);
988 return true;
291d763b 989 }
990
291d763b 991 return false;
992}
993
3d5cfe81 994/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
995
996 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
997 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
998 node or for recovery of array indexing from pointer arithmetic.
999 Returns true, if all uses have been propagated into. */
1000
1001static bool
15ec875c 1002forward_propagate_addr_expr (tree name, tree rhs)
3d5cfe81 1003{
6b42039a 1004 int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
3d5cfe81 1005 imm_use_iterator iter;
75a70cf9 1006 gimple use_stmt;
3d5cfe81 1007 bool all = true;
6776dec8 1008 bool single_use_p = has_single_use (name);
3d5cfe81 1009
09aca5bc 1010 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
3d5cfe81 1011 {
c96420f8 1012 bool result;
9481f629 1013 tree use_rhs;
3d5cfe81 1014
1015 /* If the use is not in a simple assignment statement, then
1016 there is nothing we can do. */
75a70cf9 1017 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
3d5cfe81 1018 {
688ff29b 1019 if (!is_gimple_debug (use_stmt))
9845d120 1020 all = false;
3d5cfe81 1021 continue;
1022 }
1023
a540e2fe 1024 /* If the use is in a deeper loop nest, then we do not want
ed40c3d0 1025 to propagate non-invariant ADDR_EXPRs into the loop as that
1026 is likely adding expression evaluations into the loop. */
6b42039a 1027 if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
ed40c3d0 1028 && !is_gimple_min_invariant (rhs))
3d5cfe81 1029 {
1030 all = false;
1031 continue;
1032 }
a540e2fe 1033
75a70cf9 1034 {
1035 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1036 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1037 single_use_p);
dd277d48 1038 /* If the use has moved to a different statement adjust
4c5fd53c 1039 the update machinery for the old statement too. */
dd277d48 1040 if (use_stmt != gsi_stmt (gsi))
1041 {
dd277d48 1042 update_stmt (use_stmt);
4c5fd53c 1043 use_stmt = gsi_stmt (gsi);
dd277d48 1044 }
4c5fd53c 1045
1046 update_stmt (use_stmt);
75a70cf9 1047 }
c96420f8 1048 all &= result;
de6ed584 1049
15ec875c 1050 /* Remove intermediate now unused copy and conversion chains. */
75a70cf9 1051 use_rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 1052 if (result
75a70cf9 1053 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
7b705d94 1054 && TREE_CODE (use_rhs) == SSA_NAME
1055 && has_zero_uses (gimple_assign_lhs (use_stmt)))
15ec875c 1056 {
75a70cf9 1057 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
15ec875c 1058 release_defs (use_stmt);
75a70cf9 1059 gsi_remove (&gsi, true);
15ec875c 1060 }
3d5cfe81 1061 }
1062
628ce22b 1063 return all && has_zero_uses (name);
3d5cfe81 1064}
1065
678b2f5b 1066
e3a19533 1067/* Forward propagate the comparison defined in *DEFGSI like
678b2f5b 1068 cond_1 = x CMP y to uses of the form
1069 a_1 = (T')cond_1
1070 a_1 = !cond_1
1071 a_1 = cond_1 != 0
e3a19533 1072 Returns true if stmt is now unused. Advance DEFGSI to the next
1073 statement. */
678b2f5b 1074
1075static bool
e3a19533 1076forward_propagate_comparison (gimple_stmt_iterator *defgsi)
678b2f5b 1077{
e3a19533 1078 gimple stmt = gsi_stmt (*defgsi);
678b2f5b 1079 tree name = gimple_assign_lhs (stmt);
1080 gimple use_stmt;
1081 tree tmp = NULL_TREE;
e5b1e080 1082 gimple_stmt_iterator gsi;
1083 enum tree_code code;
1084 tree lhs;
678b2f5b 1085
1086 /* Don't propagate ssa names that occur in abnormal phis. */
1087 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1088 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1089 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1090 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
e3a19533 1091 goto bailout;
678b2f5b 1092
1093 /* Do not un-cse comparisons. But propagate through copies. */
1094 use_stmt = get_prop_dest_stmt (name, &name);
e5b1e080 1095 if (!use_stmt
1096 || !is_gimple_assign (use_stmt))
e3a19533 1097 goto bailout;
678b2f5b 1098
e5b1e080 1099 code = gimple_assign_rhs_code (use_stmt);
1100 lhs = gimple_assign_lhs (use_stmt);
1101 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
e3a19533 1102 goto bailout;
678b2f5b 1103
e5b1e080 1104 /* We can propagate the condition into a statement that
1105 computes the logical negation of the comparison result. */
4b5f1658 1106 if ((code == BIT_NOT_EXPR
1107 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1108 || (code == BIT_XOR_EXPR
1109 && integer_onep (gimple_assign_rhs2 (use_stmt))))
e5b1e080 1110 {
1111 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1112 bool nans = HONOR_NANS (TYPE_MODE (type));
1113 enum tree_code inv_code;
1114 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1115 if (inv_code == ERROR_MARK)
e3a19533 1116 goto bailout;
678b2f5b 1117
e5b1e080 1118 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1119 gimple_assign_rhs2 (stmt));
1120 }
1121 else
e3a19533 1122 goto bailout;
678b2f5b 1123
e5b1e080 1124 gsi = gsi_for_stmt (use_stmt);
1125 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1126 use_stmt = gsi_stmt (gsi);
1127 update_stmt (use_stmt);
678b2f5b 1128
e5b1e080 1129 if (dump_file && (dump_flags & TDF_DETAILS))
1130 {
1131 fprintf (dump_file, " Replaced '");
1132 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1133 fprintf (dump_file, "' with '");
1134 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1135 fprintf (dump_file, "'\n");
678b2f5b 1136 }
1137
e3a19533 1138 /* When we remove stmt now the iterator defgsi goes off it's current
1139 sequence, hence advance it now. */
1140 gsi_next (defgsi);
1141
e5b1e080 1142 /* Remove defining statements. */
1143 return remove_prop_source_from_use (name);
e3a19533 1144
1145bailout:
1146 gsi_next (defgsi);
1147 return false;
678b2f5b 1148}
1149
1150
d23e1965 1151/* GSI_P points to a statement which performs a narrowing integral
1152 conversion.
1153
1154 Look for cases like:
1155
1156 t = x & c;
1157 y = (T) t;
1158
1159 Turn them into:
1160
1161 t = x & c;
1162 y = (T) x;
1163
1164 If T is narrower than X's type and C merely masks off bits outside
1165 of (T) and nothing else.
1166
1167 Normally we'd let DCE remove the dead statement. But no DCE runs
1168 after the last forwprop/combine pass, so we remove the obviously
1169 dead code ourselves.
1170
1171 Return TRUE if a change was made, FALSE otherwise. */
1172
1173static bool
1174simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
1175{
1176 gimple stmt = gsi_stmt (*gsi_p);
1177 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
1178
1179 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1180 the only use of the BIT_AND_EXPR result is the conversion. */
1181 if (is_gimple_assign (rhs_def_stmt)
1182 && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
1183 && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
1184 {
1185 tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
1186 tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
1187 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1188
1189 /* Now verify suitability of the BIT_AND_EXPR's operands.
1190 The first must be an SSA_NAME that we can propagate and the
1191 second must be an integer constant that masks out all the
1192 bits outside the final result's type, but nothing else. */
1193 if (TREE_CODE (rhs_def_operand1) == SSA_NAME
1194 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
1195 && TREE_CODE (rhs_def_operand2) == INTEGER_CST
1196 && operand_equal_p (rhs_def_operand2,
1197 build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
1198 TYPE_PRECISION (lhs_type)),
1199 0))
1200 {
1201 /* This is an optimizable case. Replace the source operand
1202 in the conversion with the first source operand of the
1203 BIT_AND_EXPR. */
1204 gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
1205 stmt = gsi_stmt (*gsi_p);
1206 update_stmt (stmt);
1207
1208 /* There is no DCE after the last forwprop pass. It's
1209 easy to clean up the first order effects here. */
1210 gimple_stmt_iterator si;
1211 si = gsi_for_stmt (rhs_def_stmt);
1212 gsi_remove (&si, true);
1213 release_defs (rhs_def_stmt);
1214 return true;
1215 }
1216 }
1217
1218 return false;
1219}
1220
1221
3a938499 1222/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1223 If so, we can change STMT into lhs = y which can later be copy
48e1416a 1224 propagated. Similarly for negation.
3a938499 1225
48e1416a 1226 This could trivially be formulated as a forward propagation
3a938499 1227 to immediate uses. However, we already had an implementation
1228 from DOM which used backward propagation via the use-def links.
1229
1230 It turns out that backward propagation is actually faster as
1231 there's less work to do for each NOT/NEG expression we find.
1232 Backwards propagation needs to look at the statement in a single
1233 backlink. Forward propagation needs to look at potentially more
678b2f5b 1234 than one forward link.
3a938499 1235
678b2f5b 1236 Returns true when the statement was changed. */
1237
1238static bool
75a70cf9 1239simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
3a938499 1240{
75a70cf9 1241 gimple stmt = gsi_stmt (*gsi_p);
1242 tree rhs = gimple_assign_rhs1 (stmt);
1243 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
3a938499 1244
1245 /* See if the RHS_DEF_STMT has the same form as our statement. */
75a70cf9 1246 if (is_gimple_assign (rhs_def_stmt)
1247 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
3a938499 1248 {
75a70cf9 1249 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
3a938499 1250
1251 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1252 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1253 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1254 {
75a70cf9 1255 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1256 stmt = gsi_stmt (*gsi_p);
3a938499 1257 update_stmt (stmt);
678b2f5b 1258 return true;
3a938499 1259 }
1260 }
678b2f5b 1261
1262 return false;
3a938499 1263}
3d5cfe81 1264
b59e1c90 1265/* Helper function for simplify_gimple_switch. Remove case labels that
1266 have values outside the range of the new type. */
1267
1268static void
1269simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1270{
1271 unsigned int branch_num = gimple_switch_num_labels (stmt);
f1f41a6c 1272 vec<tree> labels;
1273 labels.create (branch_num);
b59e1c90 1274 unsigned int i, len;
1275
1276 /* Collect the existing case labels in a VEC, and preprocess it as if
1277 we are gimplifying a GENERIC SWITCH_EXPR. */
1278 for (i = 1; i < branch_num; i++)
f1f41a6c 1279 labels.quick_push (gimple_switch_label (stmt, i));
b59e1c90 1280 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1281
1282 /* If any labels were removed, replace the existing case labels
1283 in the GIMPLE_SWITCH statement with the correct ones.
1284 Note that the type updates were done in-place on the case labels,
1285 so we only have to replace the case labels in the GIMPLE_SWITCH
1286 if the number of labels changed. */
f1f41a6c 1287 len = labels.length ();
b59e1c90 1288 if (len < branch_num - 1)
1289 {
1290 bitmap target_blocks;
1291 edge_iterator ei;
1292 edge e;
1293
1294 /* Corner case: *all* case labels have been removed as being
1295 out-of-range for INDEX_TYPE. Push one label and let the
1296 CFG cleanups deal with this further. */
1297 if (len == 0)
1298 {
1299 tree label, elt;
1300
1301 label = CASE_LABEL (gimple_switch_default_label (stmt));
1302 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
f1f41a6c 1303 labels.quick_push (elt);
b59e1c90 1304 len = 1;
1305 }
1306
f1f41a6c 1307 for (i = 0; i < labels.length (); i++)
1308 gimple_switch_set_label (stmt, i + 1, labels[i]);
b59e1c90 1309 for (i++ ; i < branch_num; i++)
1310 gimple_switch_set_label (stmt, i, NULL_TREE);
1311 gimple_switch_set_num_labels (stmt, len + 1);
1312
1313 /* Cleanup any edges that are now dead. */
1314 target_blocks = BITMAP_ALLOC (NULL);
1315 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1316 {
1317 tree elt = gimple_switch_label (stmt, i);
1318 basic_block target = label_to_block (CASE_LABEL (elt));
1319 bitmap_set_bit (target_blocks, target->index);
1320 }
1321 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1322 {
1323 if (! bitmap_bit_p (target_blocks, e->dest->index))
1324 {
1325 remove_edge (e);
1326 cfg_changed = true;
1327 free_dominance_info (CDI_DOMINATORS);
1328 }
1329 else
1330 ei_next (&ei);
1331 }
1332 BITMAP_FREE (target_blocks);
1333 }
1334
f1f41a6c 1335 labels.release ();
b59e1c90 1336}
1337
b5860aba 1338/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1339 the condition which we may be able to optimize better. */
1340
678b2f5b 1341static bool
75a70cf9 1342simplify_gimple_switch (gimple stmt)
b5860aba 1343{
75a70cf9 1344 tree cond = gimple_switch_index (stmt);
b5860aba 1345 tree def, to, ti;
75a70cf9 1346 gimple def_stmt;
b5860aba 1347
1348 /* The optimization that we really care about is removing unnecessary
1349 casts. That will let us do much better in propagating the inferred
1350 constant at the switch target. */
1351 if (TREE_CODE (cond) == SSA_NAME)
1352 {
75a70cf9 1353 def_stmt = SSA_NAME_DEF_STMT (cond);
1354 if (is_gimple_assign (def_stmt))
b5860aba 1355 {
75a70cf9 1356 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
b5860aba 1357 {
1358 int need_precision;
1359 bool fail;
1360
75a70cf9 1361 def = gimple_assign_rhs1 (def_stmt);
b5860aba 1362
b5860aba 1363 to = TREE_TYPE (cond);
1364 ti = TREE_TYPE (def);
1365
1366 /* If we have an extension that preserves value, then we
1367 can copy the source value into the switch. */
1368
1369 need_precision = TYPE_PRECISION (ti);
1370 fail = false;
c5237b8b 1371 if (! INTEGRAL_TYPE_P (ti))
1372 fail = true;
1373 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
b5860aba 1374 fail = true;
1375 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1376 need_precision += 1;
1377 if (TYPE_PRECISION (to) < need_precision)
1378 fail = true;
1379
1380 if (!fail)
1381 {
75a70cf9 1382 gimple_switch_set_index (stmt, def);
b59e1c90 1383 simplify_gimple_switch_label_vec (stmt, ti);
b5860aba 1384 update_stmt (stmt);
678b2f5b 1385 return true;
b5860aba 1386 }
1387 }
1388 }
1389 }
678b2f5b 1390
1391 return false;
b5860aba 1392}
1393
27f931ff 1394/* For pointers p2 and p1 return p2 - p1 if the
1395 difference is known and constant, otherwise return NULL. */
1396
1397static tree
1398constant_pointer_difference (tree p1, tree p2)
1399{
1400 int i, j;
1401#define CPD_ITERATIONS 5
1402 tree exps[2][CPD_ITERATIONS];
1403 tree offs[2][CPD_ITERATIONS];
1404 int cnt[2];
1405
1406 for (i = 0; i < 2; i++)
1407 {
1408 tree p = i ? p1 : p2;
1409 tree off = size_zero_node;
1410 gimple stmt;
1411 enum tree_code code;
1412
1413 /* For each of p1 and p2 we need to iterate at least
1414 twice, to handle ADDR_EXPR directly in p1/p2,
1415 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1416 on definition's stmt RHS. Iterate a few extra times. */
1417 j = 0;
1418 do
1419 {
1420 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1421 break;
1422 if (TREE_CODE (p) == ADDR_EXPR)
1423 {
1424 tree q = TREE_OPERAND (p, 0);
1425 HOST_WIDE_INT offset;
1426 tree base = get_addr_base_and_unit_offset (q, &offset);
1427 if (base)
1428 {
1429 q = base;
1430 if (offset)
1431 off = size_binop (PLUS_EXPR, off, size_int (offset));
1432 }
1433 if (TREE_CODE (q) == MEM_REF
1434 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1435 {
1436 p = TREE_OPERAND (q, 0);
1437 off = size_binop (PLUS_EXPR, off,
1438 double_int_to_tree (sizetype,
1439 mem_ref_offset (q)));
1440 }
1441 else
1442 {
1443 exps[i][j] = q;
1444 offs[i][j++] = off;
1445 break;
1446 }
1447 }
1448 if (TREE_CODE (p) != SSA_NAME)
1449 break;
1450 exps[i][j] = p;
1451 offs[i][j++] = off;
1452 if (j == CPD_ITERATIONS)
1453 break;
1454 stmt = SSA_NAME_DEF_STMT (p);
1455 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1456 break;
1457 code = gimple_assign_rhs_code (stmt);
1458 if (code == POINTER_PLUS_EXPR)
1459 {
1460 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1461 break;
1462 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1463 p = gimple_assign_rhs1 (stmt);
1464 }
1465 else if (code == ADDR_EXPR || code == NOP_EXPR)
1466 p = gimple_assign_rhs1 (stmt);
1467 else
1468 break;
1469 }
1470 while (1);
1471 cnt[i] = j;
1472 }
1473
1474 for (i = 0; i < cnt[0]; i++)
1475 for (j = 0; j < cnt[1]; j++)
1476 if (exps[0][i] == exps[1][j])
1477 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1478
1479 return NULL_TREE;
1480}
1481
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd ", 7);
   call if the latter can be stored by pieces during expansion. */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  /* STMT1 is found through the virtual use: the statement that last
     defined memory before the memset.  */
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      /* Only a plain 3-argument memset with unused result on a target
	 with 8-bit bytes is handled.  */
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  /* The memset value and length must be compile-time
	     constants.  */
	  if (!host_integerp (val2, 0)
	      || !host_integerp (len2, 1))
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length. */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!host_integerp (len1, 1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      /* The constant offset/length must stay within the string
		 literal and the literal must be a plain char array.  */
	      if (!host_integerp (off1, 1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_low_cst (off1, 1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment. */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !host_integerp (src1, 0))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      /* Retry relative to the mempcpy return value, which
		 points LEN1 bytes past PTR1.  */
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out. */
	  if (diff == NULL
	      || !host_integerp (diff, 1)
	      || tree_int_cst_lt (len1, diff))
	    break;

	  /* Use maximum of difference plus memset length and memcpy length
	     as the new memcpy length, if it is too big, bail out. */
	  src_len = tree_low_cst (diff, 1);
	  src_len += tree_low_cst (len2, 1);
	  if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
	    src_len = tree_low_cst (len1, 1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result. */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it. */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal. */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
		    tree_low_cst (len1, 1));
	  else
	    src_buf[0] = tree_low_cst (src1, 0);
	  memset (src_buf + tree_low_cst (diff, 1),
		  tree_low_cst (val2, 0), tree_low_cst (len2, 1));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's. */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer. */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call. */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		release_ssa_name (lhs1);
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call. */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      /* Return false: *GSI_P still points at the (rewritten)
		 call, so the caller must not skip it.  */
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}
1690
41913fa9 1691/* Checks if expression has type of one-bit precision, or is a known
1692 truth-valued expression. */
1693static bool
1694truth_valued_ssa_name (tree name)
1695{
1696 gimple def;
1697 tree type = TREE_TYPE (name);
1698
1699 if (!INTEGRAL_TYPE_P (type))
1700 return false;
1701 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1702 necessarily one and so ~X is not equal to !X. */
1703 if (TYPE_PRECISION (type) == 1)
1704 return true;
1705 def = SSA_NAME_DEF_STMT (name);
1706 if (is_gimple_assign (def))
1707 return truth_value_p (gimple_assign_rhs_code (def));
1708 return false;
1709}
1710
1711/* Helper routine for simplify_bitwise_binary_1 function.
1712 Return for the SSA name NAME the expression X if it mets condition
1713 NAME = !X. Otherwise return NULL_TREE.
1714 Detected patterns for NAME = !X are:
1715 !X and X == 0 for X with integral type.
1716 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1717static tree
1718lookup_logical_inverted_value (tree name)
1719{
1720 tree op1, op2;
1721 enum tree_code code;
1722 gimple def;
1723
1724 /* If name has none-intergal type, or isn't a SSA_NAME, then
1725 return. */
1726 if (TREE_CODE (name) != SSA_NAME
1727 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1728 return NULL_TREE;
1729 def = SSA_NAME_DEF_STMT (name);
1730 if (!is_gimple_assign (def))
1731 return NULL_TREE;
1732
1733 code = gimple_assign_rhs_code (def);
1734 op1 = gimple_assign_rhs1 (def);
1735 op2 = NULL_TREE;
1736
1737 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
8f4a7578 1738 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
41913fa9 1739 if (code == EQ_EXPR || code == NE_EXPR
1740 || code == BIT_XOR_EXPR)
1741 op2 = gimple_assign_rhs2 (def);
1742
1743 switch (code)
1744 {
41913fa9 1745 case BIT_NOT_EXPR:
1746 if (truth_valued_ssa_name (name))
1747 return op1;
1748 break;
1749 case EQ_EXPR:
1750 /* Check if we have X == 0 and X has an integral type. */
1751 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1752 break;
1753 if (integer_zerop (op2))
1754 return op1;
1755 break;
1756 case NE_EXPR:
1757 /* Check if we have X != 1 and X is a truth-valued. */
1758 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1759 break;
1760 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1761 return op1;
1762 break;
1763 case BIT_XOR_EXPR:
1764 /* Check if we have X ^ 1 and X is truth valued. */
1765 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1766 return op1;
1767 break;
1768 default:
1769 break;
1770 }
1771
1772 return NULL_TREE;
1773}
1774
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  Returns the folded constant or NULL_TREE
   if no simplification applies.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
			   tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  After these
     checks ANOT is the logical inversion of the other argument.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
	  || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
  return NULL_TREE;
}
1810
10fbe63d 1811/* Given a ssa_name in NAME see if it was defined by an assignment and
1812 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1813 to the second operand on the rhs. */
1814
1815static inline void
1816defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1817{
1818 gimple def;
1819 enum tree_code code1;
1820 tree arg11;
1821 tree arg21;
1822 tree arg31;
1823 enum gimple_rhs_class grhs_class;
1824
1825 code1 = TREE_CODE (name);
1826 arg11 = name;
1827 arg21 = NULL_TREE;
1828 grhs_class = get_gimple_rhs_class (code1);
1829
1830 if (code1 == SSA_NAME)
1831 {
1832 def = SSA_NAME_DEF_STMT (name);
1833
1834 if (def && is_gimple_assign (def)
1835 && can_propagate_from (def))
1836 {
1837 code1 = gimple_assign_rhs_code (def);
1838 arg11 = gimple_assign_rhs1 (def);
1839 arg21 = gimple_assign_rhs2 (def);
1840 arg31 = gimple_assign_rhs2 (def);
1841 }
1842 }
1843 else if (grhs_class == GIMPLE_TERNARY_RHS
1844 || GIMPLE_BINARY_RHS
1845 || GIMPLE_UNARY_RHS
1846 || GIMPLE_SINGLE_RHS)
1847 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1848
1849 *code = code1;
1850 *arg1 = arg11;
1851 if (arg2)
1852 *arg2 = arg21;
1853 /* Ignore arg3 currently. */
1854}
1855
750e47f5 1856/* Return true if a conversion of an operand from type FROM to type TO
1857 should be applied after performing the operation instead. */
1858
1859static bool
1860hoist_conversion_for_bitop_p (tree to, tree from)
1861{
1862 /* That's a good idea if the conversion widens the operand, thus
1863 after hoisting the conversion the operation will be narrower. */
1864 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1865 return true;
1866
1867 /* It's also a good idea if the conversion is to a non-integer mode. */
1868 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1869 return true;
1870
1871 /* Or if the precision of TO is not the same as the precision
1872 of its mode. */
1873 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1874 return true;
1875
1876 return false;
1877}
1878
/* GSI points to a statement of the form

   result = OP0 CODE OP1

   Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
   BIT_AND_EXPR or BIT_IOR_EXPR.

   If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
   then we can simplify the two statements into a single LT_EXPR or LE_EXPR
   when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.

   If a simplification is made, return TRUE, else return FALSE.  */
static bool
simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
				 enum tree_code code,
				 tree op0, tree op1)
{
  gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);

  /* OP0 must be defined by a BIT_NOT_EXPR assignment.  */
  if (!is_gimple_assign (op0_def_stmt)
      || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
    return false;

  tree x = gimple_assign_rhs1 (op0_def_stmt);
  /* The negated operand must be a single-bit integral SSA_NAME of the
     same signedness as OP1 so the comparison below is well-defined.  */
  if (TREE_CODE (x) == SSA_NAME
      && INTEGRAL_TYPE_P (TREE_TYPE (x))
      && TYPE_PRECISION (TREE_TYPE (x)) == 1
      && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
    {
      enum tree_code newcode;

      gimple stmt = gsi_stmt (*gsi);
      /* Rewrite the statement in place as X <new-compare> OP1.  */
      gimple_assign_set_rhs1 (stmt, x);
      gimple_assign_set_rhs2 (stmt, op1);
      /* For a signed 1-bit type the nonzero value is -1, which inverts
	 the sense of the comparison relative to the unsigned case.  */
      if (code == BIT_AND_EXPR)
	newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
      else
	newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
      gimple_assign_set_rhs_code (stmt, newcode);
      update_stmt (stmt);
      return true;
    }
  return false;

}
1924
/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
  enum tree_code def1_code, def2_code;

  /* Look through the defining statements of both operands.  */
  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);

  /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
     when profitable.  */
  if (TREE_CODE (arg2) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (def1_code)
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop =
        gimple_build_assign_with_ops (code, tem, def1_arg1,
				      fold_convert_loc (gimple_location (stmt),
							TREE_TYPE (def1_arg1),
							arg2));
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     to combine successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }


  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
  if (def1_code == def2_code
      && def1_code == BIT_AND_EXPR
      && operand_equal_for_phi_arg_p (def1_arg2,
				      def2_arg2))
    {
      tree b = def1_arg2;
      tree a = def1_arg1;
      tree c = def2_arg1;
      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C (this usually means C is the same as A) is 0
	 then fold it down correctly. */
      if (integer_zerop (inner))
	{
	  gimple_assign_set_rhs_from_tree (gsi, inner);
	  update_stmt (stmt);
	  return true;
	}
      /* If A OP0 C (this usually means C is the same as A) is a ssa_name
	 then fold it down correctly. */
      else if (TREE_CODE (inner) == SSA_NAME)
	{
	  tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
				    inner, b);
	  gimple_assign_set_rhs_from_tree (gsi, outer);
	  update_stmt (stmt);
	  return true;
	}
      else
	{
	  /* Otherwise emit A OP0 C as a new statement and rewrite
	     STMT to (A OP0 C) & B.  */
	  gimple newop;
	  tree tem;
	  tem = make_ssa_name (TREE_TYPE (arg2), NULL);
	  newop = gimple_build_assign_with_ops (code, tem, a, c);
	  gimple_set_location (newop, gimple_location (stmt));
	  /* Make sure to re-process the new stmt as it's walking upwards.  */
	  gsi_insert_before (gsi, newop, GSI_NEW_STMT);
	  gimple_assign_set_rhs1 (stmt, tem);
	  gimple_assign_set_rhs2 (stmt, b);
	  gimple_assign_set_rhs_code (stmt, def1_code);
	  update_stmt (stmt);
	  return true;
	}
    }

  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
			      arg2, def1_arg2);
      tree tem;
      gimple newop;
      /* CST1 & CST2 == 0 means the IOR contributed nothing.  */
      if (integer_zerop (cst))
	{
	  gimple_assign_set_rhs1 (stmt, def1_arg1);
	  update_stmt (stmt);
	  return true;
	}
      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
					    tem, def1_arg1, arg2);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
			      arg2, def1_arg2);
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }

  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
    {
      /* OCODE is the opposite operation of CODE for the absorption
	 patterns below.  */
      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
      if (def1_code == ocode)
	{
	  tree x = arg2;
	  enum tree_code coden;
	  tree a1, a2;
	  /* ( X | Y) & X -> X */
	  /* ( X & Y) | X -> X */
	  if (x == def1_arg1
	      || x == def1_arg2)
	    {
	      gimple_assign_set_rhs_from_tree (gsi, x);
	      update_stmt (gsi_stmt (*gsi));
	      return true;
	    }

	  defcodefor_name (def1_arg1, &coden, &a1, &a2);
	  /* (~X | Y) & X -> X & Y */
	  /* (~X & Y) | X -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def1_arg2);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	  defcodefor_name (def1_arg2, &coden, &a1, &a2);
	  /* (Y | ~X) & X -> X & Y */
	  /* (Y & ~X) | X -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def1_arg1);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	}
      if (def2_code == ocode)
	{
	  /* Mirror of the DEF1 cases above with the inner operation
	     feeding the second operand.  */
	  enum tree_code coden;
	  tree a1;
	  tree x = arg1;
	  /* X & ( X | Y) -> X */
	  /* X | ( X & Y) -> X */
	  if (x == def2_arg1
	      || x == def2_arg2)
	    {
	      gimple_assign_set_rhs_from_tree (gsi, x);
	      update_stmt (gsi_stmt (*gsi));
	      return true;
	    }
	  defcodefor_name (def2_arg1, &coden, &a1, NULL);
	  /* X & (~X | Y) -> X & Y */
	  /* X | (~X & Y) -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def2_arg2);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	  defcodefor_name (def2_arg2, &coden, &a1, NULL);
	  /* X & (Y | ~X) -> X & Y */
	  /* X | (Y & ~X) -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def2_arg1);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	}

      /* If arg1 and arg2 are booleans (or any single bit type)
         then try to simplify:

	   (~X & Y) -> X < Y
	   (X & ~Y) -> Y < X
	   (~X | Y) -> X <= Y
	   (X | ~Y) -> Y <= X

	 But only do this if our result feeds into a comparison as
	 this transformation is not always a win, particularly on
	 targets with and-not instructions.  */
      if (TREE_CODE (arg1) == SSA_NAME
	  && TREE_CODE (arg2) == SSA_NAME
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
	  && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
	  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
	      == TYPE_UNSIGNED (TREE_TYPE (arg2))))
	{
	  use_operand_p use_p;
	  gimple use_stmt;

	  /* Only fire when the single use is a GIMPLE_COND testing the
	     result against zero with NE_EXPR.  */
	  if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
	    {
	      if (gimple_code (use_stmt) == GIMPLE_COND
		  && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
		  && integer_zerop (gimple_cond_rhs (use_stmt))
		  && gimple_cond_code (use_stmt) == NE_EXPR)
		{
		  if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
		    return true;
		  if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
		    return true;
		}
	    }
	}
    }
  return false;
}
2212
ca3c9092 2213
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (host_integerp (def_arg2[0], 1)
      && host_integerp (def_arg2[1], 1)
      && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
	 + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
	 The C/C++ FE cast any shift count argument to integer_type_node.
	 The only problem might be if the shift count type maximum value
	 is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && host_integerp (cdef_arg1[i], 0)
	    && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe for Y being 0,
	   because then one of the shifts triggers undefined behavior.
	   This alternative is safe even for rotation count of 0.
	   One shift count is Y and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && host_integerp (cdef_arg2[i], 0)
		 && tree_low_cst (cdef_arg2[i], 0)
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		defcodefor_name (tem, &code, &tem, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		    && (tem == def_arg2[1 - i]
			|| tem == def_arg2_alt[1 - i]))
		  {
		    rotcnt = tem;
		    break;
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      /* Remember whether the count was found on the second operand so
	 the rotate direction below can be flipped accordingly.  */
      swapped_p = i != 1;
    }

  /* Convert the rotate count to the type the shifts used, if needed.  */
  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign_with_ops (NOP_EXPR,
					make_ssa_name (TREE_TYPE (def_arg2[0]),
						       NULL),
					rotcnt, NULL_TREE);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
  g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
				    ? LROTATE_EXPR : RROTATE_EXPR,
				    lhs, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      /* Rotate in the wider type and narrow the result back to RTYPE.  */
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
					lhs, NULL_TREE);
    }
  gsi_replace (gsi, g, false);
  return true;
}
2450
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  Iterate since contracting one negate may
     expose another.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A +- CST) +- CST  ->  A +- CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */

  /* Patterns keyed on the definition of the first operand.  */
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (CONSTANT_CLASS_P (rhs2)
		       && CONSTANT_CLASS_P (def_rhs1))
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (CONSTANT_CLASS_P (rhs2)
		       && CONSTANT_CLASS_P (def_rhs2))
		{
		  /* (A +- CST) +- CST -> A +- CST.  */
		  enum tree_code mix = (code == def_code)
				       ? PLUS_EXPR : MINUS_EXPR;
		  tree cst = fold_binary (mix, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
		  rhs2 = NULL_TREE;
		  code = TREE_CODE (rhs1);
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
			&& integer_onep (rhs2))
		       || (TREE_CODE (rhs2) == COMPLEX_CST
			   && integer_onep (TREE_REALPART (rhs2))
			   && integer_onep (TREE_IMAGPART (rhs2))))
		{
		  /* ~A + 1 -> -A.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

  /* Mirrored patterns keyed on the definition of the second operand.  */
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (CONSTANT_CLASS_P (rhs1)
		       && CONSTANT_CLASS_P (def_rhs1))
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (CONSTANT_CLASS_P (rhs1)
		       && CONSTANT_CLASS_P (def_rhs2))
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2),
					  rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
		  rhs2 = NULL_TREE;
		  code = TREE_CODE (rhs1);
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

out:
  /* Re-fold and clean up EH info if any transformation applied.  */
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	return true;
    }

  return false;
}
2737
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler and easier to analyze with respect to alignment
       ... = ptr & ~algn;  */
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  /* Walk the chain backwards: the offset must be a negation ...  */
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  /* ... of a masking with a constant alignment ...  */
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;
  /* ... applied to a conversion of PTR itself.  */
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  /* Replace the whole sequence with ptr & ~algn.  */
  algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
2789
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  /* If the conversion is useless, strip it by turning the stmt into
     a copy of its operand.  */
  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  /* Only handle an inner conversion (or int-to-float); anything else
     is not a conversion pair.  */
  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      /* Cached properties of the innermost, intermediate and final types
	 of the conversion chain inside -> inter -> final.  */
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
	return 0;

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  /* Report 2 if removing the now-dead inner stmt changed the cfg.  */
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
	   || (inter_float && inside_float)
	   || (inter_vec && inside_vec))
	  && inter_prec >= inside_prec
	  && (inter_float || inter_vec
	      || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type))
	  && ! final_ptr
	  && (! final_vec || inter_prec == inside_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  Likewise if the
	 final conversion does not change precision we can drop the
	 intermediate conversion.  */
      if (inside_int && inter_int && final_int
	  && ((inside_prec < inter_prec && inter_prec < final_prec
	       && inside_unsignedp && !inter_unsignedp)
	      || final_prec == inter_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     double_int_to_tree
			       (inside_type, double_int::mask (inter_prec)));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      /* Still need a conversion; gimplify the AND separately.  */
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return 1;
	}

      /* If we are converting an integer to a floating-point that can
	 represent it exactly and back to an integer, we can skip the
	 floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
	  (unsigned) significand_size (TYPE_MODE (inter_type))
	  >= inside_prec - !inside_unsignedp)
	{
	  if (useless_type_conversion_p (type, inside_type))
	    {
	      gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	      gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	  else
	    {
	      gimple_assign_set_rhs1 (stmt, defop0);
	      gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	}
    }

  return 0;
}
2984
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  /* Only a BIT_FIELD_REF of an SSA name of vector type is handled.  */
  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  /* op1 is the bit size, op2 the bit position of the reference.  */
  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  /* BIT_FIELD_REF of a CONSTRUCTOR can be folded directly.  */
  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* From here on only handle extraction of exactly one element.  */
  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  /* For a shuffle, follow the mask to the element's real source and
     rewrite the extraction to read from that input directly.  */
  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
	return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      /* Mask indices below nelts select from the first input,
	 the rest from the second.  */
      if (idx < nelts)
	{
	  p = gimple_assign_rhs1 (def_stmt);
	}
      else
	{
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
3065
496ec2ad 3066/* Determine whether applying the 2 permutations (mask1 then mask2)
3067 gives back one of the input. */
3068
3069static int
3070is_combined_permutation_identity (tree mask1, tree mask2)
3071{
3072 tree mask;
3073 unsigned int nelts, i, j;
3074 bool maybe_identity1 = true;
3075 bool maybe_identity2 = true;
3076
3077 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3078 && TREE_CODE (mask2) == VECTOR_CST);
3079 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3080 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3081
3082 nelts = VECTOR_CST_NELTS (mask);
3083 for (i = 0; i < nelts; i++)
3084 {
3085 tree val = VECTOR_CST_ELT (mask, i);
3086 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3087 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3088 if (j == i)
3089 maybe_identity2 = false;
3090 else if (j == i + nelts)
3091 maybe_identity1 = false;
3092 else
3093 return 0;
3094 }
3095 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3096}
3097
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  /* Only constant permutation masks are handled.  */
  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  /* Classify the first shuffle input: either a constant vector or
     an SSA name we can look through.  */
  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      /* If the composed permutation is an identity on one of the inner
	 shuffle's inputs, replace the stmt by a copy of that input.  */
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      /* Report 2 if removing the dead inner shuffle changed the cfg.  */
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  /* Don't duplicate a possibly expensive constructor.  */
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  /* The second input must also be a constant vector or a
	     single-use CONSTRUCTOR/VECTOR_CST definition.  */
	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      /* Fold the shuffle of the two constants/constructors.  */
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE(op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE(opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
3209
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  /* Collect, per constructor element, which lane of a common source
     vector it extracts; SEL becomes the candidate permutation mask.  */
  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      /* Each element must be an SSA name defined by a BIT_FIELD_REF.  */
      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      /* All extractions must read from the same source vector ORIG.  */
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
	    return false;
	  orig = ref;
	}
      /* Each extraction must be exactly one element wide.  */
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
	return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  /* The constructor must fill all lanes.  */
  if (i < nelts)
    return false;

  if (maybe_ident)
    /* The constructor just rebuilds ORIG; use it directly.  */
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      /* Punt if the target cannot perform this permutation.  */
      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
	return false;
      /* Materialize SEL as a constant mask vector and emit the shuffle.  */
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
3299
/* Main entry point for the forward propagation and statement combine
   optimizer.  Runs two phases over each basic block: first forward
   propagation of addresses and comparisons, then combining of stmts
   with their operand definitions.  Returns TODO_* flags.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs))
		{
		  /* All uses were replaced; the stmt is dead.  */
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off)))))
		{
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      if (forward_propagate_comparison (&gsi))
		cfg_changed = true;
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Dispatch on the stmt kind / rhs code to the matching
	     simplifier.  CHANGED tracks whether anything was done.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if ((code == BIT_NOT_EXPR
		     || code == NEGATE_EXPR)
		    && TREE_CODE (rhs1) == SSA_NAME)
		  changed = simplify_not_neg_expr (&gsi);
		else if (code == COND_EXPR
			 || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1. */
		    if (forward_propagate_into_cond (&gsi)
			|| combine_cond_exprs (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == BIT_AND_EXPR
			 || code == BIT_IOR_EXPR
			 || code == BIT_XOR_EXPR)
		  changed = simplify_bitwise_binary (&gsi);
		else if (code == PLUS_EXPR
			 || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		else if (code == POINTER_PLUS_EXPR)
		  changed = associate_pointerplus (&gsi);
		else if (CONVERT_EXPR_CODE_P (code)
			 || code == FLOAT_EXPR
			 || code == FIX_TRUNC_EXPR)
		  {
		    int did_something = combine_conversions (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;

		    /* If we have a narrowing conversion to an integral
		       type that is fed by a BIT_AND_EXPR, we might be
		       able to remove the BIT_AND_EXPR if it merely
		       masks off bits outside the final type (and nothing
		       else).  */
		    if (! did_something)
		      {
			tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
			tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
			if (INTEGRAL_TYPE_P (outer_type)
			    && INTEGRAL_TYPE_P (inner_type)
			    && (TYPE_PRECISION (outer_type)
				<= TYPE_PRECISION (inner_type)))
			  did_something = simplify_conversion_from_bitmask (&gsi);
		      }

		    changed = did_something != 0;
		  }
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (stmt);
	      break;

	    case GIMPLE_COND:
	      {
		int did_something;
		did_something = forward_propagate_into_gimple_cond (stmt);
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  Walk back to the first stmt not yet
		 marked done (GF_PLF_1).  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);
	      gsi_next (&gsi);
	    }
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
3549
3550
3551static bool
3552gate_forwprop (void)
3553{
408c3c77 3554 return flag_tree_forwprop;
4ee9c684 3555}
3556
namespace {

/* Pass descriptor for the forward-propagation pass.  */

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

/* Pass wrapper delegating gate/execute to the static functions above.  */

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (ctxt_); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace
3589
3590gimple_opt_pass *
3591make_pass_forwprop (gcc::context *ctxt)
3592{
3593 return new pass_forwprop (ctxt);
3594}