gcc/tree-ssa-forwprop.c
291d763b 1/* Forward propagation of expressions for single use variables.
51dbf409 2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
ce084dfc 3 Free Software Foundation, Inc.
4ee9c684 4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
4ee9c684 25#include "tree.h"
4ee9c684 26#include "tm_p.h"
27#include "basic-block.h"
28#include "timevar.h"
e5b1e080 29#include "gimple-pretty-print.h"
4ee9c684 30#include "tree-flow.h"
31#include "tree-pass.h"
32#include "tree-dump.h"
291d763b 33#include "langhooks.h"
5adc1066 34#include "flags.h"
75a70cf9 35#include "gimple.h"
27f931ff 36#include "expr.h"
4ee9c684 37
291d763b 38/* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
8f628ee8 40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
4ee9c684 42
291d763b 43 One class of common cases we handle is forward propagating a single use
48e1416a 44 variable into a COND_EXPR.
4ee9c684 45
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
49
50 Will be transformed into:
51
52 bb0:
53 if (a COND b) goto ... else goto ...
48e1416a 54
4ee9c684 55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56
57 Or (assuming c1 and c2 are constants):
58
59 bb0:
48e1416a 60 x = a + c1;
4ee9c684 61 if (x EQ/NEQ c2) goto ... else goto ...
62
63 Will be transformed into:
64
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67
68 Similarly for x = a - c1.
48e1416a 69
4ee9c684 70 Or
71
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
75
76 Will be transformed into:
77
78 bb0:
79 if (a == 0) goto ... else goto ...
80
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
84
f5c8cff5 85 Or
86
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
90
91 Will be transformed into:
92
93 bb0:
94 if (a != 0) goto ... else goto ...
95
 96 (Assuming a has an integral type and x is a boolean, or x has an
 97 integral type and a is a boolean.)
98
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
102
4ee9c684 103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
e6dfde59 105 adding insane complexity in the dominator optimizer.
4ee9c684 106
f5c8cff5 107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
111
291d763b 112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
114
115 ptr = &x->y->z;
116 res = *ptr;
117
118 Will get turned into
119
120 res = x->y->z;
121
50f39ec6 122 Or
123 ptr = (type1*)&type2var;
124 res = *ptr
125
 126 Will get turned into (if type1 and type2 are the same size
 127 and neither has volatile on it):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
129
291d763b 130 Or
131
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
134
135 Will get turned into
136
137 ptr2 = &x[constant/elementsize];
138
139 Or
140
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
145
146 Will get turned into:
147
148 ptr2 = &x[index];
149
1c4607fd 150 Or
151 ssa = (int) decl
152 res = ssa & 1
153
154 Provided that decl has known alignment >= 2, will get turned into
155
156 res = 0
157
8f628ee8 158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 {NOT_EXPR,NEG_EXPR}.
291d763b 161
4ee9c684 162 This will (of course) be extended as other needs arise. */
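
/* As an illustrative sketch (with placeholder names), the first class
   of transformations above applies to C code such as

       _Bool x = a < b;
       if (x)
         return 3;
       return 5;

   where the single-use x is forwarded so the branch tests a < b
   directly and x's defining statement becomes dead. */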
163
15ec875c 164static bool forward_propagate_addr_expr (tree name, tree rhs);
148aa112 165
166/* Set to true if we delete EH edges during the optimization. */
167static bool cfg_changed;
168
75a70cf9 169static tree rhs_to_tree (tree type, gimple stmt);
148aa112 170
83a20baf 171/* Get the next statement we can propagate NAME's value into, skipping
5adc1066 172 trivial copies. Returns the statement that is suitable as a
 173 propagation destination or NULL if there is none.
 174 This only returns destinations in a single-use chain. If FINAL_NAME_P
 175 is non-NULL, it is set to the ssa name that represents the use. */
a3451973 176
75a70cf9 177static gimple
5adc1066 178get_prop_dest_stmt (tree name, tree *final_name_p)
a3451973 179{
5adc1066 180 use_operand_p use;
75a70cf9 181 gimple use_stmt;
a3451973 182
5adc1066 183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
75a70cf9 186 return NULL;
a3451973 187
5adc1066 188 /* If this is not a trivial copy, we found it. */
8f0b877f 189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
75a70cf9 190 || gimple_assign_rhs1 (use_stmt) != name)
5adc1066 191 break;
192
193 /* Continue searching uses of the copy destination. */
75a70cf9 194 name = gimple_assign_lhs (use_stmt);
5adc1066 195 } while (1);
196
197 if (final_name_p)
198 *final_name_p = name;
199
200 return use_stmt;
a3451973 201}
202
5adc1066 203/* Get the statement we can propagate from into NAME, skipping
 204 trivial copies. Returns the statement which defines the
 205 propagation source or NULL if there is none.
 206 If SINGLE_USE_ONLY is set, consider only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
4ee9c684 210
75a70cf9 211static gimple
5adc1066 212get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
f5c8cff5 213{
5adc1066 214 bool single_use = true;
215
216 do {
75a70cf9 217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
5adc1066 218
219 if (!has_single_use (name))
220 {
221 single_use = false;
222 if (single_use_only)
75a70cf9 223 return NULL;
5adc1066 224 }
225
226 /* If name is defined by a PHI node or is the default def, bail out. */
8f0b877f 227 if (!is_gimple_assign (def_stmt))
75a70cf9 228 return NULL;
5adc1066 229
8f0b877f 230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
5adc1066 232 {
b9e98b8a 233 tree rhs;
234
5adc1066 235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
237
b9e98b8a 238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
75a70cf9 240 rhs = gimple_assign_rhs1 (def_stmt);
d9659041 241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
75a70cf9 242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
245 name = rhs;
b9e98b8a 246 else
247 return def_stmt;
248 }
249 else
250 {
251 /* Continue searching the def of the copy source name. */
75a70cf9 252 name = gimple_assign_rhs1 (def_stmt);
5adc1066 253 }
5adc1066 254 } while (1);
255}
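
/* For example (illustrative, with placeholder SSA names), given

     b_2 = a_1 < c_3;
     name_4 = b_2;

   asking for the propagation source of name_4 skips the trivial copy
   and returns the statement defining b_2. */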
e6dfde59 256
5adc1066 257/* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
e6dfde59 259
5adc1066 260static bool
75a70cf9 261can_propagate_from (gimple def_stmt)
5adc1066 262{
75a70cf9 263 gcc_assert (is_gimple_assign (def_stmt));
8f0b877f 264
484b827b 265 /* If the rhs has side-effects we cannot propagate from it. */
75a70cf9 266 if (gimple_has_volatile_ops (def_stmt))
484b827b 267 return false;
268
269 /* If the rhs is a load we cannot propagate from it. */
75a70cf9 270 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
271 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
484b827b 272 return false;
273
b9e98b8a 274 /* Constants can always be propagated. */
8f0b877f 275 if (gimple_assign_single_p (def_stmt)
276 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
b9e98b8a 277 return true;
278
75a70cf9 279 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
32cdcc42 280 if (stmt_references_abnormal_ssa_name (def_stmt))
281 return false;
4ee9c684 282
5adc1066 283 /* If the definition is a conversion of a pointer to a function type,
75a70cf9 284 then we cannot apply optimizations as some targets require
285 function pointers to be canonicalized and in this case this
286 optimization could eliminate a necessary canonicalization. */
8f0b877f 287 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
75a70cf9 288 {
289 tree rhs = gimple_assign_rhs1 (def_stmt);
290 if (POINTER_TYPE_P (TREE_TYPE (rhs))
291 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
292 return false;
293 }
8f0b877f 294
5adc1066 295 return true;
e6dfde59 296}
297
ff0739e0 298/* Remove a chain of dead statements starting at the definition of
299 NAME. The chain is linked via the first operand of the defining statements.
5d2361b0 300 If NAME was replaced in its only use then this function can be used
ff0739e0 301 to clean up dead stmts. The function handles already released SSA
302 names gracefully.
303 Returns true if cleanup-cfg has to run. */
8f628ee8 304
5adc1066 305static bool
5d2361b0 306remove_prop_source_from_use (tree name)
5adc1066 307{
75a70cf9 308 gimple_stmt_iterator gsi;
309 gimple stmt;
5d2361b0 310 bool cfg_changed = false;
8f628ee8 311
5adc1066 312 do {
5d2361b0 313 basic_block bb;
314
ff0739e0 315 if (SSA_NAME_IN_FREE_LIST (name)
316 || SSA_NAME_IS_DEFAULT_DEF (name)
317 || !has_zero_uses (name))
5d2361b0 318 return cfg_changed;
8f628ee8 319
5adc1066 320 stmt = SSA_NAME_DEF_STMT (name);
ff0739e0 321 if (gimple_code (stmt) == GIMPLE_PHI
322 || gimple_has_side_effects (stmt))
6f9714b3 323 return cfg_changed;
ff0739e0 324
325 bb = gimple_bb (stmt);
6f9714b3 326 gsi = gsi_for_stmt (stmt);
ff0739e0 327 unlink_stmt_vdef (stmt);
13ff78a4 328 if (gsi_remove (&gsi, true))
329 cfg_changed |= gimple_purge_dead_eh_edges (bb);
ff0739e0 330 release_defs (stmt);
8f628ee8 331
ff0739e0 332 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
75a70cf9 333 } while (name && TREE_CODE (name) == SSA_NAME);
8f628ee8 334
5d2361b0 335 return cfg_changed;
5adc1066 336}
8f628ee8 337
75a70cf9 338/* Return the rhs of a gimple_assign STMT in the form of a single tree,
339 converted to type TYPE.
48e1416a 340
75a70cf9 341 This should disappear, but is needed so we can combine expressions and use
342 the fold() interfaces. Long term, we need to develop folding and combine
 343 routines that deal with gimple exclusively. */
344
345static tree
346rhs_to_tree (tree type, gimple stmt)
347{
389dd41b 348 location_t loc = gimple_location (stmt);
75a70cf9 349 enum tree_code code = gimple_assign_rhs_code (stmt);
57c45d70 350 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
351 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
352 gimple_assign_rhs2 (stmt),
353 gimple_assign_rhs3 (stmt));
354 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
389dd41b 355 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
fb8ed03f 356 gimple_assign_rhs2 (stmt));
75a70cf9 357 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
fb8ed03f 358 return build1 (code, type, gimple_assign_rhs1 (stmt));
75a70cf9 359 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
360 return gimple_assign_rhs1 (stmt);
361 else
362 gcc_unreachable ();
363}
364
5adc1066 365/* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
366 the folded result in a form suitable for COND_EXPR_COND or
367 NULL_TREE, if there is no suitable simplified form. If
368 INVARIANT_ONLY is true only gimple_min_invariant results are
369 considered simplified. */
8f628ee8 370
371static tree
c73fee76 372combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
5adc1066 373 tree op0, tree op1, bool invariant_only)
8f628ee8 374{
5adc1066 375 tree t;
8f628ee8 376
5adc1066 377 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
8f628ee8 378
c73fee76 379 fold_defer_overflow_warnings ();
380 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
5adc1066 381 if (!t)
c73fee76 382 {
383 fold_undefer_overflow_warnings (false, NULL, 0);
384 return NULL_TREE;
385 }
8f628ee8 386
5adc1066 387 /* Require that we got a boolean type out if we put one in. */
388 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
8f628ee8 389
a7392604 390 /* Canonicalize the combined condition for use in a COND_EXPR. */
391 t = canonicalize_cond_expr_cond (t);
8f628ee8 392
5adc1066 393 /* Bail out if we required an invariant but didn't get one. */
75a70cf9 394 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
c73fee76 395 {
396 fold_undefer_overflow_warnings (false, NULL, 0);
397 return NULL_TREE;
398 }
399
400 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
8f628ee8 401
a7392604 402 return t;
8f628ee8 403}
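
/* For example (illustrative), folding EQ_EXPR with OP0 being the tree
   a_1 + 1 (rebuilt from a defining statement by rhs_to_tree) and OP1
   being the constant 3 yields the simplified condition a_1 == 2. */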
404
c8126d25 405/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
406 of its operand. Return a new comparison tree or NULL_TREE if there
407 were no simplifying combines. */
408
409static tree
c73fee76 410forward_propagate_into_comparison_1 (gimple stmt,
678b2f5b 411 enum tree_code code, tree type,
412 tree op0, tree op1)
c8126d25 413{
414 tree tmp = NULL_TREE;
415 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
416 bool single_use0_p = false, single_use1_p = false;
417
418 /* For comparisons use the first operand, that is likely to
419 simplify comparisons against constants. */
420 if (TREE_CODE (op0) == SSA_NAME)
421 {
422 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
423 if (def_stmt && can_propagate_from (def_stmt))
424 {
425 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
c73fee76 426 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 427 rhs0, op1, !single_use0_p);
428 if (tmp)
429 return tmp;
430 }
431 }
432
433 /* If that wasn't successful, try the second operand. */
434 if (TREE_CODE (op1) == SSA_NAME)
435 {
436 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
437 if (def_stmt && can_propagate_from (def_stmt))
438 {
439 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
c73fee76 440 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 441 op0, rhs1, !single_use1_p);
442 if (tmp)
443 return tmp;
444 }
445 }
446
447 /* If that wasn't successful either, try both operands. */
448 if (rhs0 != NULL_TREE
449 && rhs1 != NULL_TREE)
c73fee76 450 tmp = combine_cond_expr_cond (stmt, code, type,
c8126d25 451 rhs0, rhs1,
452 !(single_use0_p && single_use1_p));
453
454 return tmp;
455}
456
678b2f5b 457/* Propagate from the ssa name definition statements of the assignment
458 from a comparison at *GSI into the conditional if that simplifies it.
6f9714b3 459 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
460 otherwise returns 0. */
c8126d25 461
6f9714b3 462static int
678b2f5b 463forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
c8126d25 464{
678b2f5b 465 gimple stmt = gsi_stmt (*gsi);
466 tree tmp;
6f9714b3 467 bool cfg_changed = false;
56632de0 468 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
6f9714b3 469 tree rhs1 = gimple_assign_rhs1 (stmt);
470 tree rhs2 = gimple_assign_rhs2 (stmt);
c8126d25 471
472 /* Combine the comparison with defining statements. */
c73fee76 473 tmp = forward_propagate_into_comparison_1 (stmt,
678b2f5b 474 gimple_assign_rhs_code (stmt),
56632de0 475 type, rhs1, rhs2);
476 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
c8126d25 477 {
678b2f5b 478 gimple_assign_set_rhs_from_tree (gsi, tmp);
50aacf4c 479 fold_stmt (gsi);
480 update_stmt (gsi_stmt (*gsi));
75200312 481
6f9714b3 482 if (TREE_CODE (rhs1) == SSA_NAME)
483 cfg_changed |= remove_prop_source_from_use (rhs1);
484 if (TREE_CODE (rhs2) == SSA_NAME)
485 cfg_changed |= remove_prop_source_from_use (rhs2);
486 return cfg_changed ? 2 : 1;
c8126d25 487 }
488
6f9714b3 489 return 0;
c8126d25 490}
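
/* For example (illustrative, with placeholder SSA names), given

     tmp_1 = a_2 + 1;
     res_3 = tmp_1 == 3;

   the comparison is combined with tmp_1's definition so that
   res_3 = a_2 == 2, and tmp_1's definition is removed if this
   was its only use. */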
491
5adc1066 492/* Propagate from the ssa name definition statements of COND_EXPR
75a70cf9 493 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
494 Returns zero if no statement was changed, one if there were
495 changes and two if cfg_cleanup needs to run.
48e1416a 496
75a70cf9 497 This must be kept in sync with forward_propagate_into_cond. */
498
499static int
500forward_propagate_into_gimple_cond (gimple stmt)
501{
678b2f5b 502 tree tmp;
503 enum tree_code code = gimple_cond_code (stmt);
6f9714b3 504 bool cfg_changed = false;
505 tree rhs1 = gimple_cond_lhs (stmt);
506 tree rhs2 = gimple_cond_rhs (stmt);
678b2f5b 507
508 /* We can do tree combining on SSA_NAME and comparison expressions. */
509 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
510 return 0;
511
c73fee76 512 tmp = forward_propagate_into_comparison_1 (stmt, code,
678b2f5b 513 boolean_type_node,
6f9714b3 514 rhs1, rhs2);
678b2f5b 515 if (tmp)
516 {
517 if (dump_file && tmp)
518 {
678b2f5b 519 fprintf (dump_file, " Replaced '");
6f9714b3 520 print_gimple_expr (dump_file, stmt, 0, 0);
678b2f5b 521 fprintf (dump_file, "' with '");
522 print_generic_expr (dump_file, tmp, 0);
523 fprintf (dump_file, "'\n");
524 }
75a70cf9 525
678b2f5b 526 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
527 update_stmt (stmt);
75a70cf9 528
6f9714b3 529 if (TREE_CODE (rhs1) == SSA_NAME)
530 cfg_changed |= remove_prop_source_from_use (rhs1);
531 if (TREE_CODE (rhs2) == SSA_NAME)
532 cfg_changed |= remove_prop_source_from_use (rhs2);
533 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
678b2f5b 534 }
75a70cf9 535
10a6edd6 536 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
537 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
538 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
539 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
540 && ((code == EQ_EXPR
541 && integer_zerop (rhs2))
542 || (code == NE_EXPR
543 && integer_onep (rhs2))))
544 {
545 basic_block bb = gimple_bb (stmt);
546 gimple_cond_set_code (stmt, NE_EXPR);
547 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
548 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
549 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
550 return 1;
551 }
552
6f9714b3 553 return 0;
75a70cf9 554}
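
/* For example (illustrative, with placeholder SSA names), for

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto <then>; else goto <else>;

   the condition becomes if (a_2 < b_3) and x_1's definition is removed
   when it has no other uses.  Likewise a condition if (b_1 == 0) on a
   boolean b_1 is canonicalized to if (b_1 != 0) with the true and
   false edges swapped. */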
555
556
557/* Propagate from the ssa name definition statements of COND_EXPR
558 in the rhs of statement STMT into the conditional if that simplifies it.
8a2caf10 559 Returns true if the stmt was changed. */
4ee9c684 560
8a2caf10 561static bool
75a70cf9 562forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
e6dfde59 563{
75a70cf9 564 gimple stmt = gsi_stmt (*gsi_p);
678b2f5b 565 tree tmp = NULL_TREE;
566 tree cond = gimple_assign_rhs1 (stmt);
10a6edd6 567 bool swap = false;
d080be9e 568
678b2f5b 569 /* We can do tree combining on SSA_NAME and comparison expressions. */
570 if (COMPARISON_CLASS_P (cond))
c73fee76 571 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
c8126d25 572 boolean_type_node,
573 TREE_OPERAND (cond, 0),
574 TREE_OPERAND (cond, 1));
678b2f5b 575 else if (TREE_CODE (cond) == SSA_NAME)
576 {
10a6edd6 577 enum tree_code code;
8a2caf10 578 tree name = cond;
678b2f5b 579 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
580 if (!def_stmt || !can_propagate_from (def_stmt))
6f9714b3 581 return 0;
5adc1066 582
10a6edd6 583 code = gimple_assign_rhs_code (def_stmt);
584 if (TREE_CODE_CLASS (code) == tcc_comparison)
8a2caf10 585 tmp = fold_build2_loc (gimple_location (def_stmt),
10a6edd6 586 code,
8a2caf10 587 boolean_type_node,
588 gimple_assign_rhs1 (def_stmt),
589 gimple_assign_rhs2 (def_stmt));
10a6edd6 590 else if ((code == BIT_NOT_EXPR
591 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
592 || (code == BIT_XOR_EXPR
593 && integer_onep (gimple_assign_rhs2 (def_stmt))))
594 {
595 tmp = gimple_assign_rhs1 (def_stmt);
596 swap = true;
597 }
678b2f5b 598 }
5adc1066 599
25f48be0 600 if (tmp
601 && is_gimple_condexpr (tmp))
678b2f5b 602 {
603 if (dump_file && tmp)
604 {
605 fprintf (dump_file, " Replaced '");
606 print_generic_expr (dump_file, cond, 0);
607 fprintf (dump_file, "' with '");
608 print_generic_expr (dump_file, tmp, 0);
609 fprintf (dump_file, "'\n");
610 }
d080be9e 611
8a2caf10 612 if (integer_onep (tmp))
613 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
614 else if (integer_zerop (tmp))
615 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
616 else
10a6edd6 617 {
618 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
619 if (swap)
620 {
621 tree t = gimple_assign_rhs2 (stmt);
622 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
623 gimple_assign_set_rhs3 (stmt, t);
624 }
625 }
678b2f5b 626 stmt = gsi_stmt (*gsi_p);
627 update_stmt (stmt);
5adc1066 628
8a2caf10 629 return true;
678b2f5b 630 }
d080be9e 631
6f9714b3 632 return 0;
4ee9c684 633}
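
/* For example (illustrative, with placeholder SSA names), given

     c_1 = a_2 < b_3;
     x_4 = c_1 ? v_5 : w_6;

   the COND_EXPR condition is replaced so that
   x_4 = a_2 < b_3 ? v_5 : w_6.  If instead c_1 were defined as
   c_1 = ~d_7 with a one-bit type, d_7 becomes the condition and the
   two arms are swapped. */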
634
360b78f3 635/* Propagate from the ssa name definition statements of COND_EXPR
636 values in the rhs of statement STMT into the conditional arms
637 if that simplifies it.
638 Returns true if the stmt was changed. */
639
640static bool
641combine_cond_exprs (gimple_stmt_iterator *gsi_p)
642{
643 gimple stmt = gsi_stmt (*gsi_p);
644 tree cond, val1, val2;
645 bool changed = false;
646
647 cond = gimple_assign_rhs1 (stmt);
648 val1 = gimple_assign_rhs2 (stmt);
649 if (TREE_CODE (val1) == SSA_NAME)
650 {
651 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
652 if (is_gimple_assign (def_stmt)
653 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
654 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
655 {
656 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
657 gimple_assign_set_rhs2 (stmt, val1);
658 changed = true;
659 }
660 }
661 val2 = gimple_assign_rhs3 (stmt);
662 if (TREE_CODE (val2) == SSA_NAME)
663 {
664 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
665 if (is_gimple_assign (def_stmt)
666 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
667 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
668 {
669 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
670 gimple_assign_set_rhs3 (stmt, val2);
671 changed = true;
672 }
673 }
674 if (operand_equal_p (val1, val2, 0))
675 {
676 gimple_assign_set_rhs_from_tree (gsi_p, val1);
677 stmt = gsi_stmt (*gsi_p);
678 changed = true;
679 }
680
681 if (changed)
682 update_stmt (stmt);
683
684 return changed;
685}
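
/* For example (illustrative, with placeholder SSA names), given

     x_1 = c_2 ? v_3 : w_4;
     y_5 = c_2 ? x_1 : z_6;

   the second statement is rewritten to y_5 = c_2 ? v_3 : z_6, and if
   both arms end up equal the COND_EXPR collapses to a plain copy. */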
686
48e1416a 687/* We've just substituted an ADDR_EXPR into stmt. Update all the
148aa112 688 relevant data structures to match. */
689
690static void
75a70cf9 691tidy_after_forward_propagate_addr (gimple stmt)
148aa112 692{
148aa112 693 /* We may have turned a trapping insn into a non-trapping insn. */
694 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
75a70cf9 695 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
148aa112 696 cfg_changed = true;
f2fae51f 697
75a70cf9 698 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
699 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
148aa112 700}
701
75a70cf9 702/* DEF_RHS contains the address of the 0th element in an array.
6c01267c 703 USE_STMT uses the type of DEF_RHS to compute the address of an
291d763b 704 arbitrary element within the array. The (variable) byte offset
705 of the element is contained in OFFSET.
706
707 We walk back through the use-def chains of OFFSET to verify that
708 it is indeed computing the offset of an element within the array
709 and extract the index corresponding to the given byte offset.
710
711 We then try to fold the entire address expression into a form
712 &array[index].
713
714 If we are successful, we replace the right hand side of USE_STMT
715 with the new address computation. */
716
717static bool
6c01267c 718forward_propagate_addr_into_variable_array_index (tree offset,
75a70cf9 719 tree def_rhs,
720 gimple_stmt_iterator *use_stmt_gsi)
291d763b 721{
401d1fb3 722 tree index, tunit;
75a70cf9 723 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
182cf5a9 724 tree new_rhs, tmp;
401d1fb3 725
182cf5a9 726 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
727 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
728 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
729 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
730 else
731 return false;
401d1fb3 732 if (!host_integerp (tunit, 1))
733 return false;
291d763b 734
65c220cd 735 /* Get the offset's defining statement. */
736 offset_def = SSA_NAME_DEF_STMT (offset);
737
738 /* Try to find an expression for a proper index. This is either a
739 multiplication expression by the element size or just the ssa name we came
740 along in case the element size is one. In that case, however, we do not
741 allow multiplications because they can be computing index to a higher
742 level dimension (PR 37861). */
401d1fb3 743 if (integer_onep (tunit))
1a773ec5 744 {
65c220cd 745 if (is_gimple_assign (offset_def)
746 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
747 return false;
291d763b 748
65c220cd 749 index = offset;
750 }
751 else
752 {
0de36bdb 753 /* The statement which defines OFFSET before type conversion
75a70cf9 754 must be a simple GIMPLE_ASSIGN. */
65c220cd 755 if (!is_gimple_assign (offset_def))
1a773ec5 756 return false;
291d763b 757
0de36bdb 758 /* The RHS of the statement which defines OFFSET must be a
48e1416a 759 multiplication of an object by the size of the array elements.
0de36bdb 760 This implicitly verifies that the size of the array elements
761 is constant. */
401d1fb3 762 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
763 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
764 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
765 {
766 /* The first operand to the MULT_EXPR is the desired index. */
767 index = gimple_assign_rhs1 (offset_def);
768 }
769 /* If we have idx * tunit + CST * tunit re-associate that. */
770 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
771 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
772 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
773 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
774 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
775 gimple_assign_rhs2 (offset_def),
776 tunit)) != NULL_TREE)
777 {
778 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
507b89a4 779 if (is_gimple_assign (offset_def2)
780 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
401d1fb3 781 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
782 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
783 {
784 index = fold_build2 (gimple_assign_rhs_code (offset_def),
785 TREE_TYPE (offset),
786 gimple_assign_rhs1 (offset_def2), tmp);
787 }
788 else
789 return false;
790 }
791 else
1a773ec5 792 return false;
1a773ec5 793 }
291d763b 794
795 /* Replace the pointer addition with array indexing. */
401d1fb3 796 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
797 true, GSI_SAME_STMT);
182cf5a9 798 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
799 {
800 new_rhs = unshare_expr (def_rhs);
801 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
802 }
803 else
804 {
805 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
806 unshare_expr (TREE_OPERAND (def_rhs, 0)),
807 index, integer_zero_node, NULL_TREE);
808 new_rhs = build_fold_addr_expr (new_rhs);
809 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
810 TREE_TYPE (new_rhs)))
811 {
812 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
813 NULL_TREE, true, GSI_SAME_STMT);
814 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
815 new_rhs);
816 }
817 }
818 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
50aacf4c 819 fold_stmt (use_stmt_gsi);
820 tidy_after_forward_propagate_addr (gsi_stmt (*use_stmt_gsi));
291d763b 821 return true;
822}
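
/* For example (illustrative, with placeholder SSA names), for

     ptr_1 = &x[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 p+ offset_2;

   with four-byte array elements the multiplication is recognized and
   the pointer addition is replaced by ptr2_4 = &x[i_3]. */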
823
15ec875c 824/* NAME is a SSA_NAME representing DEF_RHS which is of the form
825 ADDR_EXPR <whatever>.
291d763b 826
3d5cfe81 827 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
291d763b 828 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
3d5cfe81 829 node or for recovery of array indexing from pointer arithmetic.
75a70cf9 830
6b5a5c42 831 Return true if the propagation was successful (even when it is not
 832 fully successful, things may still have been changed). */
291d763b 833
834static bool
75a70cf9 835forward_propagate_addr_expr_1 (tree name, tree def_rhs,
836 gimple_stmt_iterator *use_stmt_gsi,
6776dec8 837 bool single_use_p)
291d763b 838{
75a70cf9 839 tree lhs, rhs, rhs2, array_ref;
75a70cf9 840 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
841 enum tree_code rhs_code;
9e019299 842 bool res = true;
291d763b 843
971c637a 844 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
291d763b 845
75a70cf9 846 lhs = gimple_assign_lhs (use_stmt);
847 rhs_code = gimple_assign_rhs_code (use_stmt);
848 rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 849
6776dec8 850 /* Trivial cases. The use statement could be a trivial copy or a
15ec875c 851 useless conversion. Recurse to the uses of the lhs as copyprop does
971c637a 852 not copy through different variant pointers and FRE does not catch
6776dec8 853 all useless conversions. Treat the case of a single-use name and
854 a conversion to def_rhs type separate, though. */
971c637a 855 if (TREE_CODE (lhs) == SSA_NAME
75a70cf9 856 && ((rhs_code == SSA_NAME && rhs == name)
316616c9 857 || CONVERT_EXPR_CODE_P (rhs_code)))
6776dec8 858 {
316616c9 859 /* Only recurse if we don't deal with a single use or we cannot
860 do the propagation to the current statement. In particular
861 we can end up with a conversion needed for a non-invariant
862 address which we cannot do in a single statement. */
863 if (!single_use_p
864 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
bd8d8d81 865 && (!is_gimple_min_invariant (def_rhs)
866 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
867 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
868 && (TYPE_PRECISION (TREE_TYPE (lhs))
869 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
971c637a 870 return forward_propagate_addr_expr (lhs, def_rhs);
871
75a70cf9 872 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
316616c9 873 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
874 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
875 else
876 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
6776dec8 877 return true;
878 }
971c637a 879
182cf5a9 880 /* Propagate through constant pointer adjustments. */
881 if (TREE_CODE (lhs) == SSA_NAME
882 && rhs_code == POINTER_PLUS_EXPR
883 && rhs == name
884 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
885 {
886 tree new_def_rhs;
887 /* As we come here with non-invariant addresses in def_rhs we need
888 to make sure we can build a valid constant offsetted address
889 for further propagation. Simply rely on fold building that
890 and check after the fact. */
891 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
892 def_rhs,
893 fold_convert (ptr_type_node,
894 gimple_assign_rhs2 (use_stmt)));
895 if (TREE_CODE (new_def_rhs) == MEM_REF
f5d03f27 896 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
182cf5a9 897 return false;
898 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
899 TREE_TYPE (rhs));
900
901 /* Recurse. If we could propagate into all uses of lhs do not
902 bother to replace into the current use but just pretend we did. */
903 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
904 && forward_propagate_addr_expr (lhs, new_def_rhs))
905 return true;
906
907 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
908 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
909 new_def_rhs, NULL_TREE);
910 else if (is_gimple_min_invariant (new_def_rhs))
911 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
912 new_def_rhs, NULL_TREE);
913 else
914 return false;
915 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
916 update_stmt (use_stmt);
917 return true;
918 }
919
48e1416a 920 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
971c637a 921 ADDR_EXPR will not appear on the LHS. */
182cf5a9 922 lhs = gimple_assign_lhs (use_stmt);
923 while (handled_component_p (lhs))
924 lhs = TREE_OPERAND (lhs, 0);
971c637a 925
182cf5a9 926 /* Now see if the LHS node is a MEM_REF using NAME. If so,
971c637a 927 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 928 if (TREE_CODE (lhs) == MEM_REF
9e019299 929 && TREE_OPERAND (lhs, 0) == name)
971c637a 930 {
182cf5a9 931 tree def_rhs_base;
932 HOST_WIDE_INT def_rhs_offset;
933 /* If the address is invariant we can always fold it. */
934 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
935 &def_rhs_offset)))
9e019299 936 {
182cf5a9 937 double_int off = mem_ref_offset (lhs);
938 tree new_ptr;
939 off = double_int_add (off,
940 shwi_to_double_int (def_rhs_offset));
941 if (TREE_CODE (def_rhs_base) == MEM_REF)
942 {
943 off = double_int_add (off, mem_ref_offset (def_rhs_base));
944 new_ptr = TREE_OPERAND (def_rhs_base, 0);
945 }
946 else
947 new_ptr = build_fold_addr_expr (def_rhs_base);
948 TREE_OPERAND (lhs, 0) = new_ptr;
949 TREE_OPERAND (lhs, 1)
950 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
9e019299 951 tidy_after_forward_propagate_addr (use_stmt);
9e019299 952 /* Continue propagating into the RHS if this was not the only use. */
953 if (single_use_p)
954 return true;
955 }
182cf5a9 956 /* If the LHS is a plain dereference and the value type is the same as
957 that of the pointed-to type of the address we can put the
958 dereferenced address on the LHS preserving the original alias-type. */
959 else if (gimple_assign_lhs (use_stmt) == lhs
b97e39a0 960 && integer_zerop (TREE_OPERAND (lhs, 1))
182cf5a9 961 && useless_type_conversion_p
962 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
963 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
964 {
965 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 966 tree new_offset, new_base, saved, new_lhs;
182cf5a9 967 while (handled_component_p (*def_rhs_basep))
968 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
969 saved = *def_rhs_basep;
970 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
971 {
972 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 973 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
974 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 975 }
976 else
977 {
978 new_base = build_fold_addr_expr (*def_rhs_basep);
979 new_offset = TREE_OPERAND (lhs, 1);
980 }
981 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
982 new_base, new_offset);
2e5dc41c 983 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 984 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
2e5dc41c 985 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
98d96c6f 986 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
987 gimple_assign_set_lhs (use_stmt, new_lhs);
988 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
31fa5b0d 989 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
182cf5a9 990 *def_rhs_basep = saved;
991 tidy_after_forward_propagate_addr (use_stmt);
992 /* Continue propagating into the RHS if this was not the
993 only use. */
994 if (single_use_p)
995 return true;
996 }
9e019299 997 else
998 /* We can have a struct assignment dereferencing our name twice.
999 Note that we didn't propagate into the lhs to not falsely
1000 claim we did when propagating into the rhs. */
1001 res = false;
971c637a 1002 }
15ec875c 1003
631d5db6 1004 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
1005 nodes from the RHS. */
182cf5a9 1006 rhs = gimple_assign_rhs1 (use_stmt);
1007 if (TREE_CODE (rhs) == ADDR_EXPR)
1008 rhs = TREE_OPERAND (rhs, 0);
1009 while (handled_component_p (rhs))
1010 rhs = TREE_OPERAND (rhs, 0);
291d763b 1011
182cf5a9 1012 /* Now see if the RHS node is a MEM_REF using NAME. If so,
291d763b 1013 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 1014 if (TREE_CODE (rhs) == MEM_REF
1015 && TREE_OPERAND (rhs, 0) == name)
291d763b 1016 {
182cf5a9 1017 tree def_rhs_base;
1018 HOST_WIDE_INT def_rhs_offset;
1019 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
1020 &def_rhs_offset)))
1021 {
1022 double_int off = mem_ref_offset (rhs);
1023 tree new_ptr;
1024 off = double_int_add (off,
1025 shwi_to_double_int (def_rhs_offset));
1026 if (TREE_CODE (def_rhs_base) == MEM_REF)
1027 {
1028 off = double_int_add (off, mem_ref_offset (def_rhs_base));
1029 new_ptr = TREE_OPERAND (def_rhs_base, 0);
1030 }
1031 else
1032 new_ptr = build_fold_addr_expr (def_rhs_base);
1033 TREE_OPERAND (rhs, 0) = new_ptr;
1034 TREE_OPERAND (rhs, 1)
1035 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
50aacf4c 1036 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 1037 tidy_after_forward_propagate_addr (use_stmt);
1038 return res;
1039 }
2e5dc41c 1040 /* If the RHS is a plain dereference and the value type is the same as
182cf5a9 1041 that of the pointed-to type of the address we can put the
2e5dc41c 1042 dereferenced address on the RHS preserving the original alias-type. */
182cf5a9 1043 else if (gimple_assign_rhs1 (use_stmt) == rhs
b97e39a0 1044 && integer_zerop (TREE_OPERAND (rhs, 1))
182cf5a9 1045 && useless_type_conversion_p
1046 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
1047 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
1048 {
1049 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
98d96c6f 1050 tree new_offset, new_base, saved, new_rhs;
182cf5a9 1051 while (handled_component_p (*def_rhs_basep))
1052 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
1053 saved = *def_rhs_basep;
1054 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
1055 {
1056 new_base = TREE_OPERAND (*def_rhs_basep, 0);
b97e39a0 1057 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
1058 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 1059 }
1060 else
1061 {
1062 new_base = build_fold_addr_expr (*def_rhs_basep);
1063 new_offset = TREE_OPERAND (rhs, 1);
1064 }
1065 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
1066 new_base, new_offset);
2e5dc41c 1067 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 1068 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
2e5dc41c 1069 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
98d96c6f 1070 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
1071 gimple_assign_set_rhs1 (use_stmt, new_rhs);
1072 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
31fa5b0d 1073 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
182cf5a9 1074 *def_rhs_basep = saved;
50aacf4c 1075 fold_stmt_inplace (use_stmt_gsi);
182cf5a9 1076 tidy_after_forward_propagate_addr (use_stmt);
1077 return res;
1078 }
291d763b 1079 }
1080
971c637a 1081 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
1082 is nothing to do. */
75a70cf9 1083 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
1084 || gimple_assign_rhs1 (use_stmt) != name)
971c637a 1085 return false;
1086
291d763b 1087 /* The remaining cases are all for turning pointer arithmetic into
1088 array indexing. They only apply when we have the address of
1089 element zero in an array. If that is not the case then there
1090 is nothing to do. */
15ec875c 1091 array_ref = TREE_OPERAND (def_rhs, 0);
182cf5a9 1092 if ((TREE_CODE (array_ref) != ARRAY_REF
1093 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
1094 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
1095 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
291d763b 1096 return false;
1097
75a70cf9 1098 rhs2 = gimple_assign_rhs2 (use_stmt);
704d7315 1099 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
75a70cf9 1100 if (TREE_CODE (rhs2) == INTEGER_CST)
291d763b 1101 {
704d7315 1102 tree new_rhs = build1_loc (gimple_location (use_stmt),
1103 ADDR_EXPR, TREE_TYPE (def_rhs),
1104 fold_build2 (MEM_REF,
1105 TREE_TYPE (TREE_TYPE (def_rhs)),
1106 unshare_expr (def_rhs),
1107 fold_convert (ptr_type_node,
1108 rhs2)));
1109 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1110 use_stmt = gsi_stmt (*use_stmt_gsi);
1111 update_stmt (use_stmt);
1112 tidy_after_forward_propagate_addr (use_stmt);
1113 return true;
291d763b 1114 }
1115
0de36bdb 1116 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
291d763b 1117 converting a multiplication of an index by the size of the
1118 array elements, then the result is converted into the proper
1119 type for the arithmetic. */
75a70cf9 1120 if (TREE_CODE (rhs2) == SSA_NAME
182cf5a9 1121 && (TREE_CODE (array_ref) != ARRAY_REF
1122 || integer_zerop (TREE_OPERAND (array_ref, 1)))
c019af4d 1123 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
291d763b 1124 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1125 different type than their operands. */
83a99d39 1126 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
75a70cf9 1127 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1128 use_stmt_gsi);
291d763b 1129 return false;
1130}
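
/* For example (illustrative, with placeholder SSA names), if
   name_1 = &x->y->z and a use is res_2 = *name_1, the dereference is
   folded to res_2 = x->y->z.  A constant adjustment name_1 p+ 4 is
   folded into a new address, and &x[0] p+ i*elementsize is handled by
   forward_propagate_addr_into_variable_array_index. */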
1131
3d5cfe81 1132/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1133
1134 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1135 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1136 node or for recovery of array indexing from pointer arithmetic.
1137 Returns true, if all uses have been propagated into. */
1138
1139static bool
15ec875c 1140forward_propagate_addr_expr (tree name, tree rhs)
3d5cfe81 1141{
75a70cf9 1142 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
3d5cfe81 1143 imm_use_iterator iter;
75a70cf9 1144 gimple use_stmt;
3d5cfe81 1145 bool all = true;
6776dec8 1146 bool single_use_p = has_single_use (name);
3d5cfe81 1147
09aca5bc 1148 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
3d5cfe81 1149 {
c96420f8 1150 bool result;
9481f629 1151 tree use_rhs;
3d5cfe81 1152
1153 /* If the use is not in a simple assignment statement, then
1154 there is nothing we can do. */
75a70cf9 1155 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
3d5cfe81 1156 {
688ff29b 1157 if (!is_gimple_debug (use_stmt))
9845d120 1158 all = false;
3d5cfe81 1159 continue;
1160 }
1161
a540e2fe 1162 /* If the use is in a deeper loop nest, then we do not want
ed40c3d0 1163 to propagate non-invariant ADDR_EXPRs into the loop as that
1164 is likely adding expression evaluations into the loop. */
1165 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1166 && !is_gimple_min_invariant (rhs))
3d5cfe81 1167 {
1168 all = false;
1169 continue;
1170 }
a540e2fe 1171
75a70cf9 1172 {
1173 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1174 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1175 single_use_p);
dd277d48 1176 /* If the use has moved to a different statement adjust
4c5fd53c 1177 the update machinery for the old statement too. */
dd277d48 1178 if (use_stmt != gsi_stmt (gsi))
1179 {
dd277d48 1180 update_stmt (use_stmt);
4c5fd53c 1181 use_stmt = gsi_stmt (gsi);
dd277d48 1182 }
4c5fd53c 1183
1184 update_stmt (use_stmt);
75a70cf9 1185 }
c96420f8 1186 all &= result;
de6ed584 1187
15ec875c 1188 /* Remove intermediate now unused copy and conversion chains. */
75a70cf9 1189 use_rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 1190 if (result
75a70cf9 1191 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
7b705d94 1192 && TREE_CODE (use_rhs) == SSA_NAME
1193 && has_zero_uses (gimple_assign_lhs (use_stmt)))
15ec875c 1194 {
75a70cf9 1195 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
15ec875c 1196 release_defs (use_stmt);
75a70cf9 1197 gsi_remove (&gsi, true);
15ec875c 1198 }
3d5cfe81 1199 }
1200
628ce22b 1201 return all && has_zero_uses (name);
3d5cfe81 1202}
1203
678b2f5b 1204
1205/* Forward propagate the comparison defined in STMT like
1206 cond_1 = x CMP y to uses of the form
1207 a_1 = (T')cond_1
1208 a_1 = !cond_1
1209 a_1 = cond_1 != 0
1210 Returns true if stmt is now unused. */
1211
1212static bool
1213forward_propagate_comparison (gimple stmt)
1214{
1215 tree name = gimple_assign_lhs (stmt);
1216 gimple use_stmt;
1217 tree tmp = NULL_TREE;
e5b1e080 1218 gimple_stmt_iterator gsi;
1219 enum tree_code code;
1220 tree lhs;
678b2f5b 1221
1222 /* Don't propagate ssa names that occur in abnormal phis. */
1223 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1224 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1225 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1226 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1227 return false;
1228
1229 /* Do not un-cse comparisons. But propagate through copies. */
1230 use_stmt = get_prop_dest_stmt (name, &name);
e5b1e080 1231 if (!use_stmt
1232 || !is_gimple_assign (use_stmt))
678b2f5b 1233 return false;
1234
e5b1e080 1235 code = gimple_assign_rhs_code (use_stmt);
1236 lhs = gimple_assign_lhs (use_stmt);
1237 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1238 return false;
678b2f5b 1239
e5b1e080 1240 /* We can propagate the condition into a statement that
1241 computes the logical negation of the comparison result. */
4b5f1658 1242 if ((code == BIT_NOT_EXPR
1243 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1244 || (code == BIT_XOR_EXPR
1245 && integer_onep (gimple_assign_rhs2 (use_stmt))))
e5b1e080 1246 {
1247 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1248 bool nans = HONOR_NANS (TYPE_MODE (type));
1249 enum tree_code inv_code;
1250 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1251 if (inv_code == ERROR_MARK)
678b2f5b 1252 return false;
1253
e5b1e080 1254 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1255 gimple_assign_rhs2 (stmt));
1256 }
1257 else
1258 return false;
678b2f5b 1259
e5b1e080 1260 gsi = gsi_for_stmt (use_stmt);
1261 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1262 use_stmt = gsi_stmt (gsi);
1263 update_stmt (use_stmt);
678b2f5b 1264
e5b1e080 1265 if (dump_file && (dump_flags & TDF_DETAILS))
1266 {
1267 fprintf (dump_file, " Replaced '");
1268 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1269 fprintf (dump_file, "' with '");
1270 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1271 fprintf (dump_file, "'\n");
678b2f5b 1272 }
1273
e5b1e080 1274 /* Remove defining statements. */
1275 return remove_prop_source_from_use (name);
678b2f5b 1276}
1277
1278
3a938499 1279/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1280 If so, we can change STMT into lhs = y which can later be copy
48e1416a 1281 propagated. Similarly for negation.
3a938499 1282
48e1416a 1283 This could trivially be formulated as a forward propagation
3a938499 1284 to immediate uses. However, we already had an implementation
1285 from DOM which used backward propagation via the use-def links.
1286
1287 It turns out that backward propagation is actually faster as
1288 there's less work to do for each NOT/NEG expression we find.
1289 Backwards propagation needs to look at the statement in a single
1290 backlink. Forward propagation needs to look at potentially more
678b2f5b 1291 than one forward link.
3a938499 1292
678b2f5b 1293 Returns true when the statement was changed. */
1294
1295static bool
75a70cf9 1296simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
3a938499 1297{
75a70cf9 1298 gimple stmt = gsi_stmt (*gsi_p);
1299 tree rhs = gimple_assign_rhs1 (stmt);
1300 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
3a938499 1301
1302 /* See if the RHS_DEF_STMT has the same form as our statement. */
75a70cf9 1303 if (is_gimple_assign (rhs_def_stmt)
1304 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
3a938499 1305 {
75a70cf9 1306 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
3a938499 1307
1308 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1309 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1310 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1311 {
75a70cf9 1312 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1313 stmt = gsi_stmt (*gsi_p);
3a938499 1314 update_stmt (stmt);
678b2f5b 1315 return true;
3a938499 1316 }
1317 }
678b2f5b 1318
1319 return false;
3a938499 1320}
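
/* For example (illustrative, with placeholder SSA names), given

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   the second statement becomes lhs_3 = y_2, which copy propagation
   can then clean up.  Double negation with NEGATE_EXPR is handled the
   same way. */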
3d5cfe81 1321
b5860aba 1322/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1323 the condition which we may be able to optimize better. */
1324
678b2f5b 1325static bool
75a70cf9 1326simplify_gimple_switch (gimple stmt)
b5860aba 1327{
75a70cf9 1328 tree cond = gimple_switch_index (stmt);
b5860aba 1329 tree def, to, ti;
75a70cf9 1330 gimple def_stmt;
b5860aba 1331
1332 /* The optimization that we really care about is removing unnecessary
1333 casts. That will let us do much better in propagating the inferred
1334 constant at the switch target. */
1335 if (TREE_CODE (cond) == SSA_NAME)
1336 {
75a70cf9 1337 def_stmt = SSA_NAME_DEF_STMT (cond);
1338 if (is_gimple_assign (def_stmt))
b5860aba 1339 {
75a70cf9 1340 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
b5860aba 1341 {
1342 int need_precision;
1343 bool fail;
1344
75a70cf9 1345 def = gimple_assign_rhs1 (def_stmt);
b5860aba 1346
b5860aba 1347 /* ??? Why was Jeff testing this? We are gimple... */
1b4345f7 1348 gcc_checking_assert (is_gimple_val (def));
b5860aba 1349
1350 to = TREE_TYPE (cond);
1351 ti = TREE_TYPE (def);
1352
1353 /* If we have an extension that preserves value, then we
1354 can copy the source value into the switch. */
1355
1356 need_precision = TYPE_PRECISION (ti);
1357 fail = false;
c5237b8b 1358 if (! INTEGRAL_TYPE_P (ti))
1359 fail = true;
1360 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
b5860aba 1361 fail = true;
1362 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1363 need_precision += 1;
1364 if (TYPE_PRECISION (to) < need_precision)
1365 fail = true;
1366
1367 if (!fail)
1368 {
75a70cf9 1369 gimple_switch_set_index (stmt, def);
b5860aba 1370 update_stmt (stmt);
678b2f5b 1371 return true;
b5860aba 1372 }
1373 }
1374 }
1375 }
678b2f5b 1376
1377 return false;
b5860aba 1378}
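
/* For example (illustrative), for

     unsigned char c_1 = ...;
     int i_2 = (int) c_1;
     switch (i_2) ...

   the widening cast preserves the value, so the switch index is
   replaced by c_1 directly, which helps propagating the inferred
   constant at each case target. */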
1379
27f931ff 1380/* For pointers p2 and p1 return p2 - p1 if the
1381 difference is known and constant, otherwise return NULL. */
1382
1383static tree
1384constant_pointer_difference (tree p1, tree p2)
1385{
1386 int i, j;
1387#define CPD_ITERATIONS 5
1388 tree exps[2][CPD_ITERATIONS];
1389 tree offs[2][CPD_ITERATIONS];
1390 int cnt[2];
1391
1392 for (i = 0; i < 2; i++)
1393 {
1394 tree p = i ? p1 : p2;
1395 tree off = size_zero_node;
1396 gimple stmt;
1397 enum tree_code code;
1398
1399 /* For each of p1 and p2 we need to iterate at least
1400 twice, to handle ADDR_EXPR directly in p1/p2,
1401 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1402 on definition's stmt RHS. Iterate a few extra times. */
1403 j = 0;
1404 do
1405 {
1406 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1407 break;
1408 if (TREE_CODE (p) == ADDR_EXPR)
1409 {
1410 tree q = TREE_OPERAND (p, 0);
1411 HOST_WIDE_INT offset;
1412 tree base = get_addr_base_and_unit_offset (q, &offset);
1413 if (base)
1414 {
1415 q = base;
1416 if (offset)
1417 off = size_binop (PLUS_EXPR, off, size_int (offset));
1418 }
1419 if (TREE_CODE (q) == MEM_REF
1420 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1421 {
1422 p = TREE_OPERAND (q, 0);
1423 off = size_binop (PLUS_EXPR, off,
1424 double_int_to_tree (sizetype,
1425 mem_ref_offset (q)));
1426 }
1427 else
1428 {
1429 exps[i][j] = q;
1430 offs[i][j++] = off;
1431 break;
1432 }
1433 }
1434 if (TREE_CODE (p) != SSA_NAME)
1435 break;
1436 exps[i][j] = p;
1437 offs[i][j++] = off;
1438 if (j == CPD_ITERATIONS)
1439 break;
1440 stmt = SSA_NAME_DEF_STMT (p);
1441 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1442 break;
1443 code = gimple_assign_rhs_code (stmt);
1444 if (code == POINTER_PLUS_EXPR)
1445 {
1446 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1447 break;
1448 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1449 p = gimple_assign_rhs1 (stmt);
1450 }
1451 else if (code == ADDR_EXPR || code == NOP_EXPR)
1452 p = gimple_assign_rhs1 (stmt);
1453 else
1454 break;
1455 }
1456 while (1);
1457 cnt[i] = j;
1458 }
1459
1460 for (i = 0; i < cnt[0]; i++)
1461 for (j = 0; j < cnt[1]; j++)
1462 if (exps[0][i] == exps[1][j])
1463 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1464
1465 return NULL_TREE;
1466}
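
/* For example (illustrative), with char buf[16], for p1 = &buf[1] and
   p2 = &buf[4] this returns 3; if p2 = p1 p+ n_5 with a non-constant
   n_5, NULL_TREE is returned. */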
1467
1468/* *GSI_P is a GIMPLE_CALL to a builtin function.
1469 Optimize
1470 memcpy (p, "abcd", 4);
1471 memset (p + 4, ' ', 3);
1472 into
1473 memcpy (p, "abcd ", 7);
1474 call if the latter can be stored by pieces during expansion. */
1475
1476static bool
1477simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1478{
1479 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1480 tree vuse = gimple_vuse (stmt2);
1481 if (vuse == NULL)
1482 return false;
1483 stmt1 = SSA_NAME_DEF_STMT (vuse);
1484
1485 switch (DECL_FUNCTION_CODE (callee2))
1486 {
1487 case BUILT_IN_MEMSET:
1488 if (gimple_call_num_args (stmt2) != 3
1489 || gimple_call_lhs (stmt2)
1490 || CHAR_BIT != 8
1491 || BITS_PER_UNIT != 8)
1492 break;
1493 else
1494 {
1495 tree callee1;
1496 tree ptr1, src1, str1, off1, len1, lhs1;
1497 tree ptr2 = gimple_call_arg (stmt2, 0);
1498 tree val2 = gimple_call_arg (stmt2, 1);
1499 tree len2 = gimple_call_arg (stmt2, 2);
1500 tree diff, vdef, new_str_cst;
1501 gimple use_stmt;
1502 unsigned int ptr1_align;
1503 unsigned HOST_WIDE_INT src_len;
1504 char *src_buf;
1505 use_operand_p use_p;
1506
1507 if (!host_integerp (val2, 0)
1508 || !host_integerp (len2, 1))
1509 break;
1510 if (is_gimple_call (stmt1))
1511 {
1512 /* If first stmt is a call, it needs to be memcpy
1513 or mempcpy, with string literal as second argument and
1514 constant length. */
1515 callee1 = gimple_call_fndecl (stmt1);
1516 if (callee1 == NULL_TREE
1517 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1518 || gimple_call_num_args (stmt1) != 3)
1519 break;
1520 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1521 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1522 break;
1523 ptr1 = gimple_call_arg (stmt1, 0);
1524 src1 = gimple_call_arg (stmt1, 1);
1525 len1 = gimple_call_arg (stmt1, 2);
1526 lhs1 = gimple_call_lhs (stmt1);
1527 if (!host_integerp (len1, 1))
1528 break;
1529 str1 = string_constant (src1, &off1);
1530 if (str1 == NULL_TREE)
1531 break;
1532 if (!host_integerp (off1, 1)
1533 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1534 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1535 - tree_low_cst (off1, 1)) > 0
1536 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1537 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1538 != TYPE_MODE (char_type_node))
1539 break;
1540 }
1541 else if (gimple_assign_single_p (stmt1))
1542 {
1543 /* Otherwise look for length 1 memcpy optimized into
1544 assignment. */
1545 ptr1 = gimple_assign_lhs (stmt1);
1546 src1 = gimple_assign_rhs1 (stmt1);
1547 if (TREE_CODE (ptr1) != MEM_REF
1548 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1549 || !host_integerp (src1, 0))
1550 break;
1551 ptr1 = build_fold_addr_expr (ptr1);
1552 callee1 = NULL_TREE;
1553 len1 = size_one_node;
1554 lhs1 = NULL_TREE;
1555 off1 = size_zero_node;
1556 str1 = NULL_TREE;
1557 }
1558 else
1559 break;
1560
1561 diff = constant_pointer_difference (ptr1, ptr2);
1562 if (diff == NULL && lhs1 != NULL)
1563 {
1564 diff = constant_pointer_difference (lhs1, ptr2);
1565 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1566 && diff != NULL)
1567 diff = size_binop (PLUS_EXPR, diff,
1568 fold_convert (sizetype, len1));
1569 }
1570 /* If the difference between the second and first destination pointer
1571 is not constant, or is bigger than memcpy length, bail out. */
1572 if (diff == NULL
1573 || !host_integerp (diff, 1)
1574 || tree_int_cst_lt (len1, diff))
1575 break;
1576
1577 /* Use the maximum of the difference plus the memset length and the
1578 memcpy length as the new memcpy length; if it is too big, bail out. */
1579 src_len = tree_low_cst (diff, 1);
1580 src_len += tree_low_cst (len2, 1);
1581 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1582 src_len = tree_low_cst (len1, 1);
1583 if (src_len > 1024)
1584 break;
1585
1586 /* If the mempcpy value is used elsewhere, bail out, as mempcpy
1587 with a bigger length will return a different result. */
1588 if (lhs1 != NULL_TREE
1589 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1590 && (TREE_CODE (lhs1) != SSA_NAME
1591 || !single_imm_use (lhs1, &use_p, &use_stmt)
1592 || use_stmt != stmt2))
1593 break;
1594
1595 /* If anything reads memory in between the memcpy and memset
1596 calls, the modified memcpy call might change what it reads. */
1597 vdef = gimple_vdef (stmt1);
1598 if (vdef != NULL
1599 && (!single_imm_use (vdef, &use_p, &use_stmt)
1600 || use_stmt != stmt2))
1601 break;
1602
957d0361 1603 ptr1_align = get_pointer_alignment (ptr1);
27f931ff 1604 /* Construct the new source string literal. */
1605 src_buf = XALLOCAVEC (char, src_len + 1);
1606 if (callee1)
1607 memcpy (src_buf,
1608 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1609 tree_low_cst (len1, 1));
1610 else
1611 src_buf[0] = tree_low_cst (src1, 0);
1612 memset (src_buf + tree_low_cst (diff, 1),
1613 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1614 src_buf[src_len] = '\0';
1615 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1616 handle embedded '\0's. */
1617 if (strlen (src_buf) != src_len)
1618 break;
1619 rtl_profile_for_bb (gimple_bb (stmt2));
1620 /* If the new memcpy wouldn't be emitted by storing the literal
1621 by pieces, this optimization might enlarge .rodata too much,
1622 as commonly used string literals couldn't be shared any
1623 longer. */
1624 if (!can_store_by_pieces (src_len,
1625 builtin_strncpy_read_str,
1626 src_buf, ptr1_align, false))
1627 break;
1628
1629 new_str_cst = build_string_literal (src_len, src_buf);
1630 if (callee1)
1631 {
1632 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1633 memset call. */
1634 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1635 gimple_call_set_lhs (stmt1, NULL_TREE);
1636 gimple_call_set_arg (stmt1, 1, new_str_cst);
1637 gimple_call_set_arg (stmt1, 2,
1638 build_int_cst (TREE_TYPE (len1), src_len));
1639 update_stmt (stmt1);
1640 unlink_stmt_vdef (stmt2);
1641 gsi_remove (gsi_p, true);
1642 release_defs (stmt2);
1643 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1644 release_ssa_name (lhs1);
1645 return true;
1646 }
1647 else
1648 {
1649 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1650 assignment, remove STMT1 and change memset call into
1651 memcpy call. */
1652 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1653
7ecb2e7c 1654 if (!is_gimple_val (ptr1))
1655 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1656 true, GSI_SAME_STMT);
b9a16870 1657 gimple_call_set_fndecl (stmt2,
1658 builtin_decl_explicit (BUILT_IN_MEMCPY));
27f931ff 1659 gimple_call_set_arg (stmt2, 0, ptr1);
1660 gimple_call_set_arg (stmt2, 1, new_str_cst);
1661 gimple_call_set_arg (stmt2, 2,
1662 build_int_cst (TREE_TYPE (len2), src_len));
1663 unlink_stmt_vdef (stmt1);
1664 gsi_remove (&gsi, true);
1665 release_defs (stmt1);
1666 update_stmt (stmt2);
1667 return false;
1668 }
1669 }
1670 break;
1671 default:
1672 break;
1673 }
1674 return false;
1675}
1676
41913fa9 1677/* Check whether NAME has a type of one-bit precision, or is a known
1678 truth-valued expression. */
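/* For instance, an SSA name of a one-bit-precision type, or the result of a
   comparison such as t_1 = a_2 < b_3, counts as truth valued (the SSA names
   here are only for illustration).  */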
1679static bool
1680truth_valued_ssa_name (tree name)
1681{
1682 gimple def;
1683 tree type = TREE_TYPE (name);
1684
1685 if (!INTEGRAL_TYPE_P (type))
1686 return false;
1687 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1688 necessarily one and so ~X is not equal to !X. */
1689 if (TYPE_PRECISION (type) == 1)
1690 return true;
1691 def = SSA_NAME_DEF_STMT (name);
1692 if (is_gimple_assign (def))
1693 return truth_value_p (gimple_assign_rhs_code (def));
1694 return false;
1695}
1696
1697/* Helper routine for the simplify_bitwise_binary_1 function.
1698 Return for the SSA name NAME the expression X if it meets the condition
1699 NAME = !X. Otherwise return NULL_TREE.
1700 Detected patterns for NAME = !X are:
1701 !X and X == 0 for X with integral type.
1702 X ^ 1, X != 1, or ~X for X with integral type of precision one. */
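/* For example (illustrative GIMPLE with invented SSA names), given
     t_2 = x_1 == 0;
   looking up t_2 yields x_1, and for a one-bit-precision x_3 with
     t_4 = x_3 ^ 1;
   looking up t_4 likewise yields x_3.  */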
1703static tree
1704lookup_logical_inverted_value (tree name)
1705{
1706 tree op1, op2;
1707 enum tree_code code;
1708 gimple def;
1709
1710 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1711 return. */
1712 if (TREE_CODE (name) != SSA_NAME
1713 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1714 return NULL_TREE;
1715 def = SSA_NAME_DEF_STMT (name);
1716 if (!is_gimple_assign (def))
1717 return NULL_TREE;
1718
1719 code = gimple_assign_rhs_code (def);
1720 op1 = gimple_assign_rhs1 (def);
1721 op2 = NULL_TREE;
1722
1723 /* Get the second operand for an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR
8f4a7578 1724 operation. For any CODE not handled below, NULL_TREE is returned. */
41913fa9 1725 if (code == EQ_EXPR || code == NE_EXPR
1726 || code == BIT_XOR_EXPR)
1727 op2 = gimple_assign_rhs2 (def);
1728
1729 switch (code)
1730 {
41913fa9 1731 case BIT_NOT_EXPR:
1732 if (truth_valued_ssa_name (name))
1733 return op1;
1734 break;
1735 case EQ_EXPR:
1736 /* Check if we have X == 0 and X has an integral type. */
1737 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1738 break;
1739 if (integer_zerop (op2))
1740 return op1;
1741 break;
1742 case NE_EXPR:
1743 /* Check if we have X != 1 and X is truth-valued. */
1744 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1745 break;
1746 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1747 return op1;
1748 break;
1749 case BIT_XOR_EXPR:
1750 /* Check if we have X ^ 1 and X is truth-valued. */
1751 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1752 return op1;
1753 break;
1754 default:
1755 break;
1756 }
1757
1758 return NULL_TREE;
1759}
1760
1761/* Optimize ARG1 CODE ARG2 to a constant for the bitwise binary
1762 operation CODE, if one operand has the logically inverted
1763 value of the other. */
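/* As a source-level sketch of the effect (an invented example): with
     _Bool b_2 = !a_1;
   an expression a_1 & b_2 folds to 0 for any integral a_1, while
   a_1 | b_2 and a_1 ^ b_2 fold to 1 only when a_1 is itself truth valued.  */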
1764static tree
1765simplify_bitwise_binary_1 (enum tree_code code, tree type,
1766 tree arg1, tree arg2)
1767{
1768 tree anot;
1769
1770 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1771 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1772 && code != BIT_XOR_EXPR)
1773 return NULL_TREE;
1774
1775 /* First check if the operands ARG1 and ARG2 are equal. If so,
1776 return NULL_TREE, as this optimization is handled by fold_stmt. */
1777 if (arg1 == arg2)
1778 return NULL_TREE;
1779 /* See if we have in arguments logical-not patterns. */
1780 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1781 || anot != arg2)
1782 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1783 || anot != arg1))
1784 return NULL_TREE;
1785
1786 /* X & !X -> 0. */
1787 if (code == BIT_AND_EXPR)
1788 return fold_convert (type, integer_zero_node);
1789 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1790 if (truth_valued_ssa_name (anot))
1791 return fold_convert (type, integer_one_node);
1792
1793 /* ??? Otherwise the result is (X != 0 ? X : 1), which is not handled here. */
1794 return NULL_TREE;
1795}
1796
300da094 1797/* Simplify bitwise binary operations.
1798 Return true if a transformation applied, otherwise return false. */
1c4607fd 1799
300da094 1800static bool
1801simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1c4607fd 1802{
300da094 1803 gimple stmt = gsi_stmt (*gsi);
1c4607fd 1804 tree arg1 = gimple_assign_rhs1 (stmt);
1805 tree arg2 = gimple_assign_rhs2 (stmt);
300da094 1806 enum tree_code code = gimple_assign_rhs_code (stmt);
1807 tree res;
26f54bd0 1808 gimple def1 = NULL, def2 = NULL;
1809 tree def1_arg1, def2_arg1;
1810 enum tree_code def1_code, def2_code;
1c4607fd 1811
26f54bd0 1812 def1_code = TREE_CODE (arg1);
1813 def1_arg1 = arg1;
1814 if (TREE_CODE (arg1) == SSA_NAME)
1815 {
1816 def1 = SSA_NAME_DEF_STMT (arg1);
1817 if (is_gimple_assign (def1))
1818 {
1819 def1_code = gimple_assign_rhs_code (def1);
1820 def1_arg1 = gimple_assign_rhs1 (def1);
1821 }
1822 }
1823
1824 def2_code = TREE_CODE (arg2);
1825 def2_arg1 = arg2;
1826 if (TREE_CODE (arg2) == SSA_NAME)
1827 {
1828 def2 = SSA_NAME_DEF_STMT (arg2);
1829 if (is_gimple_assign (def2))
1830 {
1831 def2_code = gimple_assign_rhs_code (def2);
1832 def2_arg1 = gimple_assign_rhs1 (def2);
1833 }
1834 }
1835
25ce0d90 1836 /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST)). */
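  /* A hedged sketch of the effect (with made-up SSA names): for a short s_1,
     the sequence
       i_2 = (int) s_1;
       i_3 = i_2 & 3;
     becomes
       s_4 = s_1 & 3;
       i_3 = (int) s_4;
     provided the constant fits the narrower type, as checked below.  */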
1837 if (TREE_CODE (arg2) == INTEGER_CST
1838 && CONVERT_EXPR_CODE_P (def1_code)
105fc895 1839 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
25ce0d90 1840 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1841 {
1842 gimple newop;
1843 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1844 newop =
1845 gimple_build_assign_with_ops (code, tem, def1_arg1,
1846 fold_convert_loc (gimple_location (stmt),
1847 TREE_TYPE (def1_arg1),
1848 arg2));
1849 tem = make_ssa_name (tem, newop);
1850 gimple_assign_set_lhs (newop, tem);
4b5f1658 1851 gimple_set_location (newop, gimple_location (stmt));
25ce0d90 1852 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1853 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1854 tem, NULL_TREE, NULL_TREE);
1855 update_stmt (gsi_stmt (*gsi));
1856 return true;
1857 }
1858
300da094 1859 /* For bitwise binary operations, apply operand conversions to the
1860 binary operation result instead of to the operands. This allows
1861 combining successive conversions and bitwise binary operations. */
26f54bd0 1862 if (CONVERT_EXPR_CODE_P (def1_code)
1863 && CONVERT_EXPR_CODE_P (def2_code)
1864 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
25ce0d90 1865 /* Make sure that the conversion widens the operands, or keeps the
1866 same precision, or that it changes the operation to a bitfield
1867 precision. */
26f54bd0 1868 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
25ce0d90 1869 <= TYPE_PRECISION (TREE_TYPE (arg1)))
26f54bd0 1870 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1871 != MODE_INT)
1872 || (TYPE_PRECISION (TREE_TYPE (arg1))
1873 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1c4607fd 1874 {
26f54bd0 1875 gimple newop;
1876 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1877 NULL);
1878 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1879 tem = make_ssa_name (tem, newop);
1880 gimple_assign_set_lhs (newop, tem);
4b5f1658 1881 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 1882 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1883 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1884 tem, NULL_TREE, NULL_TREE);
1885 update_stmt (gsi_stmt (*gsi));
1886 return true;
1887 }
1888
1889 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
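  /* A worked example with concrete constants: (a | 0xf0) & 0x0f becomes
     (a & 0x0f) | (0xf0 & 0x0f); since 0xf0 & 0x0f is 0, the whole expression
     reduces to a & 0x0f, which is the integer_zerop shortcut taken below.  */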
1890 if (code == BIT_AND_EXPR
1891 && def1_code == BIT_IOR_EXPR
1892 && TREE_CODE (arg2) == INTEGER_CST
1893 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1894 {
1895 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1896 arg2, gimple_assign_rhs2 (def1));
1897 tree tem;
1898 gimple newop;
1899 if (integer_zerop (cst))
300da094 1900 {
26f54bd0 1901 gimple_assign_set_rhs1 (stmt, def1_arg1);
1902 update_stmt (stmt);
1903 return true;
300da094 1904 }
26f54bd0 1905 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1906 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1907 tem, def1_arg1, arg2);
1908 tem = make_ssa_name (tem, newop);
1909 gimple_assign_set_lhs (newop, tem);
4b5f1658 1910 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 1911 /* Make sure to re-process the new stmt as it's walking upwards. */
1912 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1913 gimple_assign_set_rhs1 (stmt, tem);
1914 gimple_assign_set_rhs2 (stmt, cst);
1915 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1916 update_stmt (stmt);
1917 return true;
1918 }
1919
1920 /* Combine successive equal operations with constants. */
1921 if ((code == BIT_AND_EXPR
1922 || code == BIT_IOR_EXPR
1923 || code == BIT_XOR_EXPR)
1924 && def1_code == code
1925 && TREE_CODE (arg2) == INTEGER_CST
1926 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1927 {
1928 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1929 arg2, gimple_assign_rhs2 (def1));
1930 gimple_assign_set_rhs1 (stmt, def1_arg1);
1931 gimple_assign_set_rhs2 (stmt, cst);
1932 update_stmt (stmt);
1933 return true;
1c4607fd 1934 }
300da094 1935
8a5f403f 1936 /* Canonicalize X ^ ~0 to ~X. */
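  /* E.g. for a 32-bit unsigned int x, x ^ 0xffffffff has the same value as
     ~x, so the statement is rewritten to a BIT_NOT_EXPR.  */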
1937 if (code == BIT_XOR_EXPR
1938 && TREE_CODE (arg2) == INTEGER_CST
1939 && integer_all_onesp (arg2))
1940 {
1941 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1942 gcc_assert (gsi_stmt (*gsi) == stmt);
1943 update_stmt (stmt);
1944 return true;
1945 }
1946
41913fa9 1947 /* Try simple folding for X op !X, and X op X. */
1948 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1949 if (res != NULL_TREE)
1950 {
1951 gimple_assign_set_rhs_from_tree (gsi, res);
1952 update_stmt (gsi_stmt (*gsi));
1953 return true;
1954 }
1955
300da094 1956 return false;
1c4607fd 1957}
1958
ca3c9092 1959
1960/* Perform re-associations of the plus or minus statement STMT that are
b69d1cb6 1961 always permitted. Returns true if the CFG was changed. */
ca3c9092 1962
b69d1cb6 1963static bool
50aacf4c 1964associate_plusminus (gimple_stmt_iterator *gsi)
ca3c9092 1965{
50aacf4c 1966 gimple stmt = gsi_stmt (*gsi);
ca3c9092 1967 tree rhs1 = gimple_assign_rhs1 (stmt);
1968 tree rhs2 = gimple_assign_rhs2 (stmt);
1969 enum tree_code code = gimple_assign_rhs_code (stmt);
ca3c9092 1970 bool changed;
1971
1972 /* We can't reassociate at all for saturating types. */
1973 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
b69d1cb6 1974 return false;
ca3c9092 1975
1976 /* First contract negates. */
1977 do
1978 {
1979 changed = false;
1980
1981 /* A +- (-B) -> A -+ B. */
1982 if (TREE_CODE (rhs2) == SSA_NAME)
1983 {
1984 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1985 if (is_gimple_assign (def_stmt)
32cdcc42 1986 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1987 && can_propagate_from (def_stmt))
ca3c9092 1988 {
1989 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1990 gimple_assign_set_rhs_code (stmt, code);
1991 rhs2 = gimple_assign_rhs1 (def_stmt);
1992 gimple_assign_set_rhs2 (stmt, rhs2);
1993 gimple_set_modified (stmt, true);
1994 changed = true;
1995 }
1996 }
1997
1998 /* (-A) + B -> B - A. */
1999 if (TREE_CODE (rhs1) == SSA_NAME
2000 && code == PLUS_EXPR)
2001 {
2002 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2003 if (is_gimple_assign (def_stmt)
32cdcc42 2004 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2005 && can_propagate_from (def_stmt))
ca3c9092 2006 {
2007 code = MINUS_EXPR;
2008 gimple_assign_set_rhs_code (stmt, code);
2009 rhs1 = rhs2;
2010 gimple_assign_set_rhs1 (stmt, rhs1);
2011 rhs2 = gimple_assign_rhs1 (def_stmt);
2012 gimple_assign_set_rhs2 (stmt, rhs2);
2013 gimple_set_modified (stmt, true);
2014 changed = true;
2015 }
2016 }
2017 }
2018 while (changed);
2019
2020 /* We can't reassociate floating-point or fixed-point plus or minus
2021 because of saturation to +-Inf. */
2022 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2023 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2024 goto out;
2025
2026 /* Second match patterns that allow contracting a plus-minus pair
2027 irrespective of overflow issues.
2028
2029 (A +- B) - A -> +- B
2030 (A +- B) -+ B -> A
2031 (CST +- A) +- CST -> CST +- A
2032 (A + CST) +- CST -> A + CST
2033 ~A + A -> -1
2034 ~A + 1 -> -A
2035 A - (A +- B) -> -+ B
2036 A +- (B +- A) -> +- B
2037 CST +- (CST +- A) -> CST +- A
2038 CST +- (A +- CST) -> CST +- A
2039 A + ~A -> -1
2040
2041 via commuting the addition and contracting operations to zero
2042 by reassociation. */
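 /* An illustrative sketch (SSA names invented for the example):
      t_1 = a_2 + b_3;
      t_4 = t_1 - a_2;
    is rewritten into t_4 = b_3;, and
      t_5 = ~a_2;
      t_6 = t_5 + 1;
    is rewritten into t_6 = -a_2;.  */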
2043
ca3c9092 2044 if (TREE_CODE (rhs1) == SSA_NAME)
2045 {
2046 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
32cdcc42 2047 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2048 {
2049 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2050 if (def_code == PLUS_EXPR
2051 || def_code == MINUS_EXPR)
2052 {
2053 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2054 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2055 if (operand_equal_p (def_rhs1, rhs2, 0)
2056 && code == MINUS_EXPR)
2057 {
2058 /* (A +- B) - A -> +- B. */
2059 code = ((def_code == PLUS_EXPR)
2060 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2061 rhs1 = def_rhs2;
2062 rhs2 = NULL_TREE;
50aacf4c 2063 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2064 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2065 gimple_set_modified (stmt, true);
2066 }
2067 else if (operand_equal_p (def_rhs2, rhs2, 0)
2068 && code != def_code)
2069 {
2070 /* (A +- B) -+ B -> A. */
2071 code = TREE_CODE (def_rhs1);
2072 rhs1 = def_rhs1;
2073 rhs2 = NULL_TREE;
50aacf4c 2074 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2075 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2076 gimple_set_modified (stmt, true);
2077 }
2078 else if (TREE_CODE (rhs2) == INTEGER_CST
2079 && TREE_CODE (def_rhs1) == INTEGER_CST)
2080 {
2081 /* (CST +- A) +- CST -> CST +- A. */
2082 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2083 def_rhs1, rhs2);
2084 if (cst && !TREE_OVERFLOW (cst))
2085 {
2086 code = def_code;
2087 gimple_assign_set_rhs_code (stmt, code);
2088 rhs1 = cst;
2089 gimple_assign_set_rhs1 (stmt, rhs1);
2090 rhs2 = def_rhs2;
2091 gimple_assign_set_rhs2 (stmt, rhs2);
2092 gimple_set_modified (stmt, true);
2093 }
2094 }
2095 else if (TREE_CODE (rhs2) == INTEGER_CST
2096 && TREE_CODE (def_rhs2) == INTEGER_CST
2097 && def_code == PLUS_EXPR)
2098 {
2099 /* (A + CST) +- CST -> A + CST. */
2100 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2101 def_rhs2, rhs2);
2102 if (cst && !TREE_OVERFLOW (cst))
2103 {
2104 code = PLUS_EXPR;
2105 gimple_assign_set_rhs_code (stmt, code);
2106 rhs1 = def_rhs1;
2107 gimple_assign_set_rhs1 (stmt, rhs1);
2108 rhs2 = cst;
2109 gimple_assign_set_rhs2 (stmt, rhs2);
2110 gimple_set_modified (stmt, true);
2111 }
2112 }
2113 }
2114 else if (def_code == BIT_NOT_EXPR
2115 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2116 {
2117 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2118 if (code == PLUS_EXPR
2119 && operand_equal_p (def_rhs1, rhs2, 0))
2120 {
2121 /* ~A + A -> -1. */
2122 code = INTEGER_CST;
19d861b9 2123 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
ca3c9092 2124 rhs2 = NULL_TREE;
50aacf4c 2125 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2126 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2127 gimple_set_modified (stmt, true);
2128 }
2129 else if (code == PLUS_EXPR
2130 && integer_onep (rhs2))
2131 {
2132 /* ~A + 1 -> -A. */
2133 code = NEGATE_EXPR;
2134 rhs1 = def_rhs1;
2135 rhs2 = NULL_TREE;
50aacf4c 2136 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2137 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2138 gimple_set_modified (stmt, true);
2139 }
2140 }
2141 }
2142 }
2143
2144 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2145 {
2146 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
32cdcc42 2147 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2148 {
2149 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2150 if (def_code == PLUS_EXPR
2151 || def_code == MINUS_EXPR)
2152 {
2153 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2154 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2155 if (operand_equal_p (def_rhs1, rhs1, 0)
2156 && code == MINUS_EXPR)
2157 {
2158 /* A - (A +- B) -> -+ B. */
2159 code = ((def_code == PLUS_EXPR)
2160 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2161 rhs1 = def_rhs2;
2162 rhs2 = NULL_TREE;
50aacf4c 2163 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2164 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2165 gimple_set_modified (stmt, true);
2166 }
2167 else if (operand_equal_p (def_rhs2, rhs1, 0)
2168 && code != def_code)
2169 {
2170 /* A +- (B +- A) -> +- B. */
2171 code = ((code == PLUS_EXPR)
2172 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2173 rhs1 = def_rhs1;
2174 rhs2 = NULL_TREE;
50aacf4c 2175 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2176 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2177 gimple_set_modified (stmt, true);
2178 }
2179 else if (TREE_CODE (rhs1) == INTEGER_CST
2180 && TREE_CODE (def_rhs1) == INTEGER_CST)
2181 {
2182 /* CST +- (CST +- A) -> CST +- A. */
2183 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2184 rhs1, def_rhs1);
2185 if (cst && !TREE_OVERFLOW (cst))
2186 {
2187 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2188 gimple_assign_set_rhs_code (stmt, code);
2189 rhs1 = cst;
2190 gimple_assign_set_rhs1 (stmt, rhs1);
2191 rhs2 = def_rhs2;
2192 gimple_assign_set_rhs2 (stmt, rhs2);
2193 gimple_set_modified (stmt, true);
2194 }
2195 }
2196 else if (TREE_CODE (rhs1) == INTEGER_CST
2197 && TREE_CODE (def_rhs2) == INTEGER_CST)
2198 {
2199 /* CST +- (A +- CST) -> CST +- A. */
2200 tree cst = fold_binary (def_code == code
2201 ? PLUS_EXPR : MINUS_EXPR,
2202 TREE_TYPE (rhs2),
2203 rhs1, def_rhs2);
2204 if (cst && !TREE_OVERFLOW (cst))
2205 {
2206 rhs1 = cst;
2207 gimple_assign_set_rhs1 (stmt, rhs1);
2208 rhs2 = def_rhs1;
2209 gimple_assign_set_rhs2 (stmt, rhs2);
2210 gimple_set_modified (stmt, true);
2211 }
2212 }
2213 }
2214 else if (def_code == BIT_NOT_EXPR
2215 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2216 {
2217 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2218 if (code == PLUS_EXPR
2219 && operand_equal_p (def_rhs1, rhs1, 0))
2220 {
2221 /* A + ~A -> -1. */
2222 code = INTEGER_CST;
19d861b9 2223 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
ca3c9092 2224 rhs2 = NULL_TREE;
50aacf4c 2225 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2226 gcc_assert (gsi_stmt (*gsi) == stmt);
ca3c9092 2227 gimple_set_modified (stmt, true);
2228 }
2229 }
2230 }
2231 }
2232
2233out:
2234 if (gimple_modified_p (stmt))
2235 {
50aacf4c 2236 fold_stmt_inplace (gsi);
ca3c9092 2237 update_stmt (stmt);
b69d1cb6 2238 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2239 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2240 return true;
ca3c9092 2241 }
b69d1cb6 2242
2243 return false;
ca3c9092 2244}
2245
6afd0544 2246/* Combine two conversions in a row, the second of which is the
89c8f35a 2247 conversion at *GSI. Returns 1 if any changes were made, 2 if cfg-cleanup
2248 needs to run. Else it returns 0. */
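/* For example (an illustrative sketch), a sign-extension of a zero-extended
   value such as
     unsigned char c_1;
     short s_2 = (short) c_1;
     int i_3 = (int) s_2;
   can be replaced by the single conversion i_3 = (int) c_1, because the
   intermediate sign-extension cannot change the value.  */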
6afd0544 2249
89c8f35a 2250static int
6afd0544 2251combine_conversions (gimple_stmt_iterator *gsi)
2252{
2253 gimple stmt = gsi_stmt (*gsi);
2254 gimple def_stmt;
2255 tree op0, lhs;
2256 enum tree_code code = gimple_assign_rhs_code (stmt);
2257
2258 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2259 || code == FLOAT_EXPR
2260 || code == FIX_TRUNC_EXPR);
2261
2262 lhs = gimple_assign_lhs (stmt);
2263 op0 = gimple_assign_rhs1 (stmt);
2264 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2265 {
2266 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
89c8f35a 2267 return 1;
6afd0544 2268 }
2269
2270 if (TREE_CODE (op0) != SSA_NAME)
89c8f35a 2271 return 0;
6afd0544 2272
2273 def_stmt = SSA_NAME_DEF_STMT (op0);
2274 if (!is_gimple_assign (def_stmt))
89c8f35a 2275 return 0;
6afd0544 2276
2277 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2278 {
2279 tree defop0 = gimple_assign_rhs1 (def_stmt);
2280 tree type = TREE_TYPE (lhs);
2281 tree inside_type = TREE_TYPE (defop0);
2282 tree inter_type = TREE_TYPE (op0);
2283 int inside_int = INTEGRAL_TYPE_P (inside_type);
2284 int inside_ptr = POINTER_TYPE_P (inside_type);
2285 int inside_float = FLOAT_TYPE_P (inside_type);
2286 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2287 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2288 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2289 int inter_int = INTEGRAL_TYPE_P (inter_type);
2290 int inter_ptr = POINTER_TYPE_P (inter_type);
2291 int inter_float = FLOAT_TYPE_P (inter_type);
2292 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2293 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2294 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2295 int final_int = INTEGRAL_TYPE_P (type);
2296 int final_ptr = POINTER_TYPE_P (type);
2297 int final_float = FLOAT_TYPE_P (type);
2298 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2299 unsigned int final_prec = TYPE_PRECISION (type);
2300 int final_unsignedp = TYPE_UNSIGNED (type);
2301
2302 /* In addition to the cases of two conversions in a row
2303 handled below, if we are converting something to its own
2304 type via an object of identical or wider precision, neither
2305 conversion is needed. */
2306 if (useless_type_conversion_p (type, inside_type)
2307 && (((inter_int || inter_ptr) && final_int)
2308 || (inter_float && final_float))
2309 && inter_prec >= final_prec)
2310 {
2311 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2312 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2313 update_stmt (stmt);
89c8f35a 2314 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2315 }
2316
2317 /* Likewise, if the intermediate and initial types are either both
2318 float or both integer, we don't need the middle conversion if the
2319 former is wider than the latter and doesn't change the signedness
2320 (for integers). Avoid this if the final type is a pointer since
2321 then we sometimes need the middle conversion. Likewise if the
2322 final type has a precision not equal to the size of its mode. */
2323 if (((inter_int && inside_int)
2324 || (inter_float && inside_float)
2325 || (inter_vec && inside_vec))
2326 && inter_prec >= inside_prec
2327 && (inter_float || inter_vec
2328 || inter_unsignedp == inside_unsignedp)
51dbf409 2329 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
6afd0544 2330 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2331 && ! final_ptr
2332 && (! final_vec || inter_prec == inside_prec))
2333 {
2334 gimple_assign_set_rhs1 (stmt, defop0);
2335 update_stmt (stmt);
89c8f35a 2336 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2337 }
2338
2339 /* If we have a sign-extension of a zero-extended value, we can
a6476f88 2340 replace that by a single zero-extension. Likewise if the
2341 final conversion does not change precision we can drop the
2342 intermediate conversion. */
6afd0544 2343 if (inside_int && inter_int && final_int
a6476f88 2344 && ((inside_prec < inter_prec && inter_prec < final_prec
2345 && inside_unsignedp && !inter_unsignedp)
2346 || final_prec == inter_prec))
6afd0544 2347 {
2348 gimple_assign_set_rhs1 (stmt, defop0);
2349 update_stmt (stmt);
89c8f35a 2350 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2351 }
2352
2353 /* Two conversions in a row are not needed unless:
2354 - some conversion is floating-point (overstrict for now), or
2355 - some conversion is a vector (overstrict for now), or
2356 - the intermediate type is narrower than both initial and
2357 final, or
2358 - the intermediate type and innermost type differ in signedness,
2359 and the outermost type is wider than the intermediate, or
2360 - the initial type is a pointer type and the precisions of the
2361 intermediate and final types differ, or
2362 - the final type is a pointer type and the precisions of the
2363 initial and intermediate types differ. */
2364 if (! inside_float && ! inter_float && ! final_float
2365 && ! inside_vec && ! inter_vec && ! final_vec
2366 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2367 && ! (inside_int && inter_int
2368 && inter_unsignedp != inside_unsignedp
2369 && inter_prec < final_prec)
2370 && ((inter_unsignedp && inter_prec > inside_prec)
2371 == (final_unsignedp && final_prec > inter_prec))
2372 && ! (inside_ptr && inter_prec != final_prec)
2373 && ! (final_ptr && inside_prec != inter_prec)
51dbf409 2374 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
6afd0544 2375 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2376 {
2377 gimple_assign_set_rhs1 (stmt, defop0);
2378 update_stmt (stmt);
89c8f35a 2379 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2380 }
2381
2382 /* A truncation to an unsigned type should be canonicalized as a
2383 bitwise AND with a mask. */
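      /* E.g. with a 32-bit int x, (int) (unsigned short) x is canonicalized
	 to x & 0xffff, i.e. a mask of inter_prec low bits (assuming the usual
	 two's-complement representation).  */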
2384 if (final_int && inter_int && inside_int
2385 && final_prec == inside_prec
2386 && final_prec > inter_prec
2387 && inter_unsignedp)
2388 {
2389 tree tem;
2390 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2391 defop0,
2392 double_int_to_tree
2393 (inside_type, double_int_mask (inter_prec)));
2394 if (!useless_type_conversion_p (type, inside_type))
2395 {
2396 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2397 GSI_SAME_STMT);
2398 gimple_assign_set_rhs1 (stmt, tem);
2399 }
2400 else
2401 gimple_assign_set_rhs_from_tree (gsi, tem);
2402 update_stmt (gsi_stmt (*gsi));
89c8f35a 2403 return 1;
6afd0544 2404 }
2405 }
2406
89c8f35a 2407 return 0;
6afd0544 2408}
2409
678b2f5b 2410/* Main entry point for the forward propagation and statement combine
2411 optimizer. */
4ee9c684 2412
2a1990e9 2413static unsigned int
678b2f5b 2414ssa_forward_propagate_and_combine (void)
4ee9c684 2415{
f5c8cff5 2416 basic_block bb;
c96420f8 2417 unsigned int todoflags = 0;
4ee9c684 2418
148aa112 2419 cfg_changed = false;
2420
f5c8cff5 2421 FOR_EACH_BB (bb)
2422 {
a7107e58 2423 gimple_stmt_iterator gsi, prev;
2424 bool prev_initialized;
291d763b 2425
678b2f5b 2426 /* Apply forward propagation to all stmts in the basic-block.
2427 Note we update GSI within the loop as necessary. */
75a70cf9 2428 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
291d763b 2429 {
75a70cf9 2430 gimple stmt = gsi_stmt (gsi);
678b2f5b 2431 tree lhs, rhs;
2432 enum tree_code code;
291d763b 2433
678b2f5b 2434 if (!is_gimple_assign (stmt))
291d763b 2435 {
678b2f5b 2436 gsi_next (&gsi);
2437 continue;
2438 }
3a938499 2439
678b2f5b 2440 lhs = gimple_assign_lhs (stmt);
2441 rhs = gimple_assign_rhs1 (stmt);
2442 code = gimple_assign_rhs_code (stmt);
2443 if (TREE_CODE (lhs) != SSA_NAME
2444 || has_zero_uses (lhs))
2445 {
2446 gsi_next (&gsi);
2447 continue;
2448 }
3a938499 2449
678b2f5b 2450 /* If this statement sets an SSA_NAME to an address,
2451 try to propagate the address into the uses of the SSA_NAME. */
2452 if (code == ADDR_EXPR
2453 /* Handle pointer conversions on invariant addresses
2454 as well, as this is valid gimple. */
2455 || (CONVERT_EXPR_CODE_P (code)
2456 && TREE_CODE (rhs) == ADDR_EXPR
2457 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2458 {
2459 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2460 if ((!base
2461 || !DECL_P (base)
2462 || decl_address_invariant_p (base))
2463 && !stmt_references_abnormal_ssa_name (stmt)
2464 && forward_propagate_addr_expr (lhs, rhs))
1c4607fd 2465 {
678b2f5b 2466 release_defs (stmt);
2467 todoflags |= TODO_remove_unused_locals;
2468 gsi_remove (&gsi, true);
1c4607fd 2469 }
678b2f5b 2470 else
2471 gsi_next (&gsi);
2472 }
cd22a796 2473 else if (code == POINTER_PLUS_EXPR)
678b2f5b 2474 {
cd22a796 2475 tree off = gimple_assign_rhs2 (stmt);
2476 if (TREE_CODE (off) == INTEGER_CST
2477 && can_propagate_from (stmt)
2478 && !simple_iv_increment_p (stmt)
678b2f5b 2479 /* ??? Better adjust the interface to that function
2480 instead of building new trees here. */
2481 && forward_propagate_addr_expr
cd22a796 2482 (lhs,
2483 build1_loc (gimple_location (stmt),
2484 ADDR_EXPR, TREE_TYPE (rhs),
2485 fold_build2 (MEM_REF,
2486 TREE_TYPE (TREE_TYPE (rhs)),
2487 rhs,
2488 fold_convert (ptr_type_node,
2489 off)))))
ca3c9092 2490 {
678b2f5b 2491 release_defs (stmt);
2492 todoflags |= TODO_remove_unused_locals;
2493 gsi_remove (&gsi, true);
ca3c9092 2494 }
678b2f5b 2495 else if (is_gimple_min_invariant (rhs))
6afd0544 2496 {
678b2f5b 2497 /* Make sure to fold &a[0] + off_1 here. */
50aacf4c 2498 fold_stmt_inplace (&gsi);
678b2f5b 2499 update_stmt (stmt);
2500 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6afd0544 2501 gsi_next (&gsi);
2502 }
291d763b 2503 else
75a70cf9 2504 gsi_next (&gsi);
291d763b 2505 }
678b2f5b 2506 else if (TREE_CODE_CLASS (code) == tcc_comparison)
b5860aba 2507 {
75200312 2508 if (forward_propagate_comparison (stmt))
2509 cfg_changed = true;
75a70cf9 2510 gsi_next (&gsi);
b5860aba 2511 }
291d763b 2512 else
75a70cf9 2513 gsi_next (&gsi);
291d763b 2514 }
678b2f5b 2515
2516 /* Combine stmts with the stmts defining their operands.
2517 Note we update GSI within the loop as necessary. */
a7107e58 2518 prev_initialized = false;
2519 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
678b2f5b 2520 {
2521 gimple stmt = gsi_stmt (gsi);
2522 bool changed = false;
2523
2524 switch (gimple_code (stmt))
2525 {
2526 case GIMPLE_ASSIGN:
2527 {
2528 tree rhs1 = gimple_assign_rhs1 (stmt);
2529 enum tree_code code = gimple_assign_rhs_code (stmt);
2530
2531 if ((code == BIT_NOT_EXPR
2532 || code == NEGATE_EXPR)
2533 && TREE_CODE (rhs1) == SSA_NAME)
2534 changed = simplify_not_neg_expr (&gsi);
360b78f3 2535 else if (code == COND_EXPR
2536 || code == VEC_COND_EXPR)
678b2f5b 2537 {
2538 /* In this case the entire COND_EXPR is in rhs1. */
8a2caf10 2539 changed |= forward_propagate_into_cond (&gsi);
360b78f3 2540 changed |= combine_cond_exprs (&gsi);
678b2f5b 2541 stmt = gsi_stmt (gsi);
678b2f5b 2542 }
2543 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2544 {
6f9714b3 2545 int did_something;
6f9714b3 2546 did_something = forward_propagate_into_comparison (&gsi);
2547 if (did_something == 2)
2548 cfg_changed = true;
6f9714b3 2549 changed = did_something != 0;
678b2f5b 2550 }
2551 else if (code == BIT_AND_EXPR
2552 || code == BIT_IOR_EXPR
2553 || code == BIT_XOR_EXPR)
2554 changed = simplify_bitwise_binary (&gsi);
2555 else if (code == PLUS_EXPR
2556 || code == MINUS_EXPR)
50aacf4c 2557 changed = associate_plusminus (&gsi);
678b2f5b 2558 else if (CONVERT_EXPR_CODE_P (code)
2559 || code == FLOAT_EXPR
2560 || code == FIX_TRUNC_EXPR)
89c8f35a 2561 {
2562 int did_something = combine_conversions (&gsi);
2563 if (did_something == 2)
2564 cfg_changed = true;
2565 changed = did_something != 0;
2566 }
678b2f5b 2567 break;
2568 }
2569
2570 case GIMPLE_SWITCH:
2571 changed = simplify_gimple_switch (stmt);
2572 break;
2573
2574 case GIMPLE_COND:
2575 {
2576 int did_something;
678b2f5b 2577 did_something = forward_propagate_into_gimple_cond (stmt);
2578 if (did_something == 2)
2579 cfg_changed = true;
678b2f5b 2580 changed = did_something != 0;
2581 break;
2582 }
2583
2584 case GIMPLE_CALL:
2585 {
2586 tree callee = gimple_call_fndecl (stmt);
2587 if (callee != NULL_TREE
2588 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2589 changed = simplify_builtin_call (&gsi, callee);
2590 break;
2591 }
2592
2593 default:;
2594 }
2595
a7107e58 2596 if (changed)
2597 {
2598 /* If the stmt changed then re-visit it and the statements
2599 inserted before it. */
2600 if (!prev_initialized)
2601 gsi = gsi_start_bb (bb);
2602 else
2603 {
2604 gsi = prev;
2605 gsi_next (&gsi);
2606 }
2607 }
2608 else
2609 {
2610 prev = gsi;
2611 prev_initialized = true;
2612 gsi_next (&gsi);
2613 }
678b2f5b 2614 }
f5c8cff5 2615 }
148aa112 2616
2617 if (cfg_changed)
6fa78c7b 2618 todoflags |= TODO_cleanup_cfg;
678b2f5b 2619
c96420f8 2620 return todoflags;
4ee9c684 2621}
2622
2623
2624static bool
2625gate_forwprop (void)
2626{
408c3c77 2627 return flag_tree_forwprop;
4ee9c684 2628}
2629
48e1416a 2630struct gimple_opt_pass pass_forwprop =
20099e35 2631{
2632 {
2633 GIMPLE_PASS,
4ee9c684 2634 "forwprop", /* name */
2635 gate_forwprop, /* gate */
678b2f5b 2636 ssa_forward_propagate_and_combine, /* execute */
4ee9c684 2637 NULL, /* sub */
2638 NULL, /* next */
2639 0, /* static_pass_number */
2640 TV_TREE_FORWPROP, /* tv_id */
49290934 2641 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 2642 0, /* properties_provided */
b6246c40 2643 0, /* properties_destroyed */
4ee9c684 2644 0, /* todo_flags_start */
771e2890 2645 TODO_ggc_collect
de6ed584 2646 | TODO_update_ssa
20099e35 2647 | TODO_verify_ssa /* todo_flags_finish */
2648 }
4ee9c684 2649};