gcc/tree-ssa-forwprop.c
291d763b 1/* Forward propagation of expressions for single use variables.
628ce22b 2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
ce084dfc 3 Free Software Foundation, Inc.
4ee9c684 4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
4ee9c684 25#include "tree.h"
4ee9c684 26#include "tm_p.h"
27#include "basic-block.h"
28#include "timevar.h"
e5b1e080 29#include "gimple-pretty-print.h"
4ee9c684 30#include "tree-flow.h"
31#include "tree-pass.h"
32#include "tree-dump.h"
291d763b 33#include "langhooks.h"
5adc1066 34#include "flags.h"
75a70cf9 35#include "gimple.h"
27f931ff 36#include "expr.h"
4ee9c684 37
291d763b 38/* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
8f628ee8 40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
4ee9c684 42
291d763b 43 One class of common cases we handle is forward propagating a single use
48e1416a 44 variable into a COND_EXPR.
4ee9c684 45
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
49
50 Will be transformed into:
51
52 bb0:
53 if (a COND b) goto ... else goto ...
48e1416a 54
4ee9c684 55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56
57 Or (assuming c1 and c2 are constants):
58
59 bb0:
48e1416a 60 x = a + c1;
4ee9c684 61 if (x EQ/NEQ c2) goto ... else goto ...
62
63 Will be transformed into:
64
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67
68 Similarly for x = a - c1.
48e1416a 69
4ee9c684 70 Or
71
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
75
76 Will be transformed into:
77
78 bb0:
79 if (a == 0) goto ... else goto ...
80
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
84
f5c8cff5 85 Or
86
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
90
91 Will be transformed into:
92
93 bb0:
94 if (a != 0) goto ... else goto ...
95
96 (Assuming a has an integral type and x is a boolean, or x has an
97 integral type and a is a boolean.)
98
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
102
4ee9c684 103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
e6dfde59 105 adding insane complexity in the dominator optimizer.
4ee9c684 106
f5c8cff5 107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
111
291d763b 112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
114
115 ptr = &x->y->z;
116 res = *ptr;
117
118 Will get turned into
119
120 res = x->y->z;
121
50f39ec6 122 Or
123 ptr = (type1*)&type2var;
124 res = *ptr
125
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
129
291d763b 130 Or
131
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
134
135 Will get turned into
136
137 ptr2 = &x[constant/elementsize];
138
139 Or
140
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
145
146 Will get turned into:
147
148 ptr2 = &x[index];
149
1c4607fd 150 Or
151 ssa = (int) decl
152 res = ssa & 1
153
154 Provided that decl has known alignment >= 2, will get turned into
155
156 res = 0
157
8f628ee8 158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast, and we propagate a {NOT_EXPR,NEG_EXPR} into
160 a subsequent {NOT_EXPR,NEG_EXPR} so that the pair cancels.
291d763b 161
4ee9c684 162 This will (of course) be extended as other needs arise. */
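/* Illustrative example (an editorial addition, not from the original
   sources): at the C level the first class of transformations above
   corresponds to code such as

     int f (int a, int b)
     {
       int x = a < b;
       if (x)
         return 1;
       return 0;
     }

   where the single-use comparison feeding the branch is forwarded into
   the GIMPLE_COND, yielding the equivalent of "if (a < b)" directly.  */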
163
15ec875c 164static bool forward_propagate_addr_expr (tree name, tree rhs);
148aa112 165
166/* Set to true if we delete EH edges during the optimization. */
167static bool cfg_changed;
168
75a70cf9 169static tree rhs_to_tree (tree type, gimple stmt);
148aa112 170
83a20baf 171/* Get the next statement we can propagate NAME's value into, skipping
5adc1066 172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL if there is no such statement.
174 This only returns destinations in a single-use chain. If FINAL_NAME_P
175 is non-NULL, the SSA name that represents the use is written to it. */
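/* For example (illustrative GIMPLE, an editorial addition): given

     tem_2 = name_1;
     res_3 = tem_2 + 1;

   get_prop_dest_stmt (name_1, &final) skips the trivial copy, returns
   the statement computing res_3 and writes tem_2 to *final.  */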
a3451973 176
75a70cf9 177static gimple
5adc1066 178get_prop_dest_stmt (tree name, tree *final_name_p)
a3451973 179{
5adc1066 180 use_operand_p use;
75a70cf9 181 gimple use_stmt;
a3451973 182
5adc1066 183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
75a70cf9 186 return NULL;
a3451973 187
5adc1066 188 /* If this is not a trivial copy, we found it. */
8f0b877f 189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
75a70cf9 190 || gimple_assign_rhs1 (use_stmt) != name)
5adc1066 191 break;
192
193 /* Continue searching uses of the copy destination. */
75a70cf9 194 name = gimple_assign_lhs (use_stmt);
5adc1066 195 } while (1);
196
197 if (final_name_p)
198 *final_name_p = name;
199
200 return use_stmt;
a3451973 201}
202
5adc1066 203/* Get the statement we can propagate from into NAME, skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL if there is no such statement.
206 If SINGLE_USE_ONLY is set, consider only sources which have
207 a single-use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single-use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
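/* For example (illustrative GIMPLE, an editorial addition): given

     a_2 = b_1 + 1;
     c_3 = a_2;
     ... use of c_3 ...

   get_prop_source_stmt (c_3, false, &single_p) walks through the trivial
   copy and returns the statement 'a_2 = b_1 + 1' as the propagation
   source.  */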
4ee9c684 210
75a70cf9 211static gimple
5adc1066 212get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
f5c8cff5 213{
5adc1066 214 bool single_use = true;
215
216 do {
75a70cf9 217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
5adc1066 218
219 if (!has_single_use (name))
220 {
221 single_use = false;
222 if (single_use_only)
75a70cf9 223 return NULL;
5adc1066 224 }
225
226 /* If name is defined by a PHI node or is the default def, bail out. */
8f0b877f 227 if (!is_gimple_assign (def_stmt))
75a70cf9 228 return NULL;
5adc1066 229
8f0b877f 230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
5adc1066 232 {
b9e98b8a 233 tree rhs;
234
5adc1066 235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
237
b9e98b8a 238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
75a70cf9 240 rhs = gimple_assign_rhs1 (def_stmt);
d9659041 241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
75a70cf9 242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
245 name = rhs;
b9e98b8a 246 else
247 return def_stmt;
248 }
249 else
250 {
251 /* Continue searching the def of the copy source name. */
75a70cf9 252 name = gimple_assign_rhs1 (def_stmt);
5adc1066 253 }
5adc1066 254 } while (1);
255}
e6dfde59 256
5adc1066 257/* Checks if the destination ssa name in DEF_STMT can be used as
258 a propagation source. Returns true if so, otherwise false. */
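/* For example (illustrative, an editorial addition): a defining statement
   such as 'tmp_1 = *ptr_2;' is rejected because its rhs is a load, while
   'tmp_1 = 42;' or 'tmp_1 = a_2 + b_3;' are acceptable sources, provided
   none of the other restrictions below apply.  */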
e6dfde59 259
5adc1066 260static bool
75a70cf9 261can_propagate_from (gimple def_stmt)
5adc1066 262{
75a70cf9 263 gcc_assert (is_gimple_assign (def_stmt));
8f0b877f 264
484b827b 265 /* If the rhs has side-effects we cannot propagate from it. */
75a70cf9 266 if (gimple_has_volatile_ops (def_stmt))
484b827b 267 return false;
268
269 /* If the rhs is a load we cannot propagate from it. */
75a70cf9 270 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
271 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
484b827b 272 return false;
273
b9e98b8a 274 /* Constants can always be propagated. */
8f0b877f 275 if (gimple_assign_single_p (def_stmt)
276 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
b9e98b8a 277 return true;
278
75a70cf9 279 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
32cdcc42 280 if (stmt_references_abnormal_ssa_name (def_stmt))
281 return false;
4ee9c684 282
5adc1066 283 /* If the definition is a conversion of a pointer to a function type,
75a70cf9 284 then we cannot apply optimizations, as some targets require
285 function pointers to be canonicalized and in this case this
286 optimization could eliminate a necessary canonicalization. */
8f0b877f 287 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
75a70cf9 288 {
289 tree rhs = gimple_assign_rhs1 (def_stmt);
290 if (POINTER_TYPE_P (TREE_TYPE (rhs))
291 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
292 return false;
293 }
8f0b877f 294
5adc1066 295 return true;
e6dfde59 296}
297
5d2361b0 298/* Remove a copy chain ending in NAME along the defs.
299 If NAME was replaced in its only use then this function can be used
300 to clean up dead stmts. Returns true if cleanup-cfg has to run. */
8f628ee8 301
5adc1066 302static bool
5d2361b0 303remove_prop_source_from_use (tree name)
5adc1066 304{
75a70cf9 305 gimple_stmt_iterator gsi;
306 gimple stmt;
5d2361b0 307 bool cfg_changed = false;
8f628ee8 308
5adc1066 309 do {
5d2361b0 310 basic_block bb;
311
5adc1066 312 if (!has_zero_uses (name))
5d2361b0 313 return cfg_changed;
8f628ee8 314
5adc1066 315 stmt = SSA_NAME_DEF_STMT (name);
5d2361b0 316 bb = gimple_bb (stmt);
6f9714b3 317 if (!bb)
318 return cfg_changed;
319 gsi = gsi_for_stmt (stmt);
5adc1066 320 release_defs (stmt);
75a70cf9 321 gsi_remove (&gsi, true);
5d2361b0 322 cfg_changed |= gimple_purge_dead_eh_edges (bb);
8f628ee8 323
75a70cf9 324 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
325 } while (name && TREE_CODE (name) == SSA_NAME);
8f628ee8 326
5d2361b0 327 return cfg_changed;
5adc1066 328}
8f628ee8 329
75a70cf9 330/* Return the rhs of a gimple_assign STMT in the form of a single tree,
331 converted to type TYPE.
48e1416a 332
75a70cf9 333 This should disappear, but is needed so we can combine expressions and use
334 the fold() interfaces. Long term, we need to develop folding and combine
335 routines that deal with gimple exclusively. */
336
337static tree
338rhs_to_tree (tree type, gimple stmt)
339{
389dd41b 340 location_t loc = gimple_location (stmt);
75a70cf9 341 enum tree_code code = gimple_assign_rhs_code (stmt);
57c45d70 342 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
343 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
344 gimple_assign_rhs2 (stmt),
345 gimple_assign_rhs3 (stmt));
346 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
389dd41b 347 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
fb8ed03f 348 gimple_assign_rhs2 (stmt));
75a70cf9 349 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
fb8ed03f 350 return build1 (code, type, gimple_assign_rhs1 (stmt));
75a70cf9 351 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
352 return gimple_assign_rhs1 (stmt);
353 else
354 gcc_unreachable ();
355}
356
5adc1066 357/* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
358 the folded result in a form suitable for COND_EXPR_COND or
359 NULL_TREE, if there is no suitable simplified form. If
360 INVARIANT_ONLY is true only gimple_min_invariant results are
361 considered simplified. */
8f628ee8 362
363static tree
389dd41b 364combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
5adc1066 365 tree op0, tree op1, bool invariant_only)
8f628ee8 366{
5adc1066 367 tree t;
8f628ee8 368
5adc1066 369 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
8f628ee8 370
389dd41b 371 t = fold_binary_loc (loc, code, type, op0, op1);
5adc1066 372 if (!t)
373 return NULL_TREE;
8f628ee8 374
5adc1066 375 /* Require that we got a boolean type out if we put one in. */
376 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
8f628ee8 377
a7392604 378 /* Canonicalize the combined condition for use in a COND_EXPR. */
379 t = canonicalize_cond_expr_cond (t);
8f628ee8 380
5adc1066 381 /* Bail out if we required an invariant but didn't get one. */
75a70cf9 382 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
5adc1066 383 return NULL_TREE;
8f628ee8 384
a7392604 385 return t;
8f628ee8 386}
387
c8126d25 388/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
389 of its operands. Return a new comparison tree or NULL_TREE if there
390 were no simplifying combines. */
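/* For example (illustrative GIMPLE, an editorial addition): if OP0 is
   defined by 'op0_1 = a_2 != 0;' and the comparison being processed is
   'op0_1 == 0', substituting the definition lets fold produce the
   simpler comparison 'a_2 == 0'.  */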
391
392static tree
678b2f5b 393forward_propagate_into_comparison_1 (location_t loc,
394 enum tree_code code, tree type,
395 tree op0, tree op1)
c8126d25 396{
397 tree tmp = NULL_TREE;
398 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
399 bool single_use0_p = false, single_use1_p = false;
400
401 /* For comparisons use the first operand, that is likely to
402 simplify comparisons against constants. */
403 if (TREE_CODE (op0) == SSA_NAME)
404 {
405 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
406 if (def_stmt && can_propagate_from (def_stmt))
407 {
408 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
409 tmp = combine_cond_expr_cond (loc, code, type,
410 rhs0, op1, !single_use0_p);
411 if (tmp)
412 return tmp;
413 }
414 }
415
416 /* If that wasn't successful, try the second operand. */
417 if (TREE_CODE (op1) == SSA_NAME)
418 {
419 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
420 if (def_stmt && can_propagate_from (def_stmt))
421 {
422 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
423 tmp = combine_cond_expr_cond (loc, code, type,
424 op0, rhs1, !single_use1_p);
425 if (tmp)
426 return tmp;
427 }
428 }
429
430 /* If that wasn't successful either, try both operands. */
431 if (rhs0 != NULL_TREE
432 && rhs1 != NULL_TREE)
433 tmp = combine_cond_expr_cond (loc, code, type,
434 rhs0, rhs1,
435 !(single_use0_p && single_use1_p));
436
437 return tmp;
438}
439
678b2f5b 440/* Propagate from the ssa name definition statements of the assignment
441 from a comparison at *GSI into the conditional if that simplifies it.
6f9714b3 442 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
443 otherwise returns 0. */
c8126d25 444
6f9714b3 445static int
678b2f5b 446forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
c8126d25 447{
678b2f5b 448 gimple stmt = gsi_stmt (*gsi);
449 tree tmp;
6f9714b3 450 bool cfg_changed = false;
451 tree rhs1 = gimple_assign_rhs1 (stmt);
452 tree rhs2 = gimple_assign_rhs2 (stmt);
c8126d25 453
454 /* Combine the comparison with defining statements. */
678b2f5b 455 tmp = forward_propagate_into_comparison_1 (gimple_location (stmt),
456 gimple_assign_rhs_code (stmt),
457 TREE_TYPE
458 (gimple_assign_lhs (stmt)),
6f9714b3 459 rhs1, rhs2);
678b2f5b 460 if (tmp)
c8126d25 461 {
678b2f5b 462 gimple_assign_set_rhs_from_tree (gsi, tmp);
463 update_stmt (stmt);
6f9714b3 464 if (TREE_CODE (rhs1) == SSA_NAME)
465 cfg_changed |= remove_prop_source_from_use (rhs1);
466 if (TREE_CODE (rhs2) == SSA_NAME)
467 cfg_changed |= remove_prop_source_from_use (rhs2);
468 return cfg_changed ? 2 : 1;
c8126d25 469 }
470
6f9714b3 471 return 0;
c8126d25 472}
473
5adc1066 474/* Propagate from the ssa name definition statements of COND_EXPR
75a70cf9 475 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
476 Returns zero if no statement was changed, one if there were
477 changes and two if cfg_cleanup needs to run.
48e1416a 478
75a70cf9 479 This must be kept in sync with forward_propagate_into_cond. */
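/* For example (illustrative GIMPLE, an editorial addition):

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   becomes

     if (a_2 < b_3) goto <bb 3>; else goto <bb 4>;

   and the now-dead definition of x_1 is removed if it had no other
   uses.  */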
480
481static int
482forward_propagate_into_gimple_cond (gimple stmt)
483{
48e1416a 484 location_t loc = gimple_location (stmt);
678b2f5b 485 tree tmp;
486 enum tree_code code = gimple_cond_code (stmt);
6f9714b3 487 bool cfg_changed = false;
488 tree rhs1 = gimple_cond_lhs (stmt);
489 tree rhs2 = gimple_cond_rhs (stmt);
678b2f5b 490
491 /* We can do tree combining on SSA_NAME and comparison expressions. */
492 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
493 return 0;
494
495 tmp = forward_propagate_into_comparison_1 (loc, code,
496 boolean_type_node,
6f9714b3 497 rhs1, rhs2);
678b2f5b 498 if (tmp)
499 {
500 if (dump_file && tmp)
501 {
678b2f5b 502 fprintf (dump_file, " Replaced '");
6f9714b3 503 print_gimple_expr (dump_file, stmt, 0, 0);
678b2f5b 504 fprintf (dump_file, "' with '");
505 print_generic_expr (dump_file, tmp, 0);
506 fprintf (dump_file, "'\n");
507 }
75a70cf9 508
678b2f5b 509 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
510 update_stmt (stmt);
75a70cf9 511
6f9714b3 512 if (TREE_CODE (rhs1) == SSA_NAME)
513 cfg_changed |= remove_prop_source_from_use (rhs1);
514 if (TREE_CODE (rhs2) == SSA_NAME)
515 cfg_changed |= remove_prop_source_from_use (rhs2);
516 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
678b2f5b 517 }
75a70cf9 518
6f9714b3 519 return 0;
75a70cf9 520}
521
522
523/* Propagate from the ssa name definition statements of COND_EXPR
524 in the rhs of statement STMT into the conditional if that simplifies it.
4c580c8c 525 Returns zero if no statement was changed, one if there were
75a70cf9 526 changes and two if cfg_cleanup needs to run.
527
528 This must be kept in sync with forward_propagate_into_gimple_cond. */
4ee9c684 529
4c580c8c 530static int
75a70cf9 531forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
e6dfde59 532{
75a70cf9 533 gimple stmt = gsi_stmt (*gsi_p);
389dd41b 534 location_t loc = gimple_location (stmt);
678b2f5b 535 tree tmp = NULL_TREE;
536 tree cond = gimple_assign_rhs1 (stmt);
d080be9e 537
678b2f5b 538 /* We can do tree combining on SSA_NAME and comparison expressions. */
539 if (COMPARISON_CLASS_P (cond))
540 tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
c8126d25 541 boolean_type_node,
542 TREE_OPERAND (cond, 0),
543 TREE_OPERAND (cond, 1));
678b2f5b 544 else if (TREE_CODE (cond) == SSA_NAME)
545 {
546 tree name = cond, rhs0;
547 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
548 if (!def_stmt || !can_propagate_from (def_stmt))
6f9714b3 549 return 0;
5adc1066 550
678b2f5b 551 rhs0 = gimple_assign_rhs1 (def_stmt);
552 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
553 build_int_cst (TREE_TYPE (rhs0), 0),
554 false);
555 }
5adc1066 556
678b2f5b 557 if (tmp)
558 {
559 if (dump_file && tmp)
560 {
561 fprintf (dump_file, " Replaced '");
562 print_generic_expr (dump_file, cond, 0);
563 fprintf (dump_file, "' with '");
564 print_generic_expr (dump_file, tmp, 0);
565 fprintf (dump_file, "'\n");
566 }
d080be9e 567
678b2f5b 568 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
569 stmt = gsi_stmt (*gsi_p);
570 update_stmt (stmt);
5adc1066 571
6f9714b3 572 return is_gimple_min_invariant (tmp) ? 2 : 1;
678b2f5b 573 }
d080be9e 574
6f9714b3 575 return 0;
4ee9c684 576}
577
48e1416a 578/* We've just substituted an ADDR_EXPR into stmt. Update all the
148aa112 579 relevant data structures to match. */
580
581static void
75a70cf9 582tidy_after_forward_propagate_addr (gimple stmt)
148aa112 583{
148aa112 584 /* We may have turned a trapping insn into a non-trapping insn. */
585 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
75a70cf9 586 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
148aa112 587 cfg_changed = true;
f2fae51f 588
75a70cf9 589 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
590 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
148aa112 591}
592
75a70cf9 593/* DEF_RHS contains the address of the 0th element in an array.
6c01267c 594 USE_STMT uses type of DEF_RHS to compute the address of an
291d763b 595 arbitrary element within the array. The (variable) byte offset
596 of the element is contained in OFFSET.
597
598 We walk back through the use-def chains of OFFSET to verify that
599 it is indeed computing the offset of an element within the array
600 and extract the index corresponding to the given byte offset.
601
602 We then try to fold the entire address expression into a form
603 &array[index].
604
605 If we are successful, we replace the right hand side of USE_STMT
606 with the new address computation. */
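/* For example (illustrative GIMPLE, an editorial addition), assuming
   4-byte array elements:

     ptr_1 = &x[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 + offset_2;

   the pointer addition is rewritten as

     ptr2_4 = &x[i_3];  */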
607
608static bool
6c01267c 609forward_propagate_addr_into_variable_array_index (tree offset,
75a70cf9 610 tree def_rhs,
611 gimple_stmt_iterator *use_stmt_gsi)
291d763b 612{
401d1fb3 613 tree index, tunit;
75a70cf9 614 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
182cf5a9 615 tree new_rhs, tmp;
401d1fb3 616
182cf5a9 617 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
618 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
619 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
620 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
621 else
622 return false;
401d1fb3 623 if (!host_integerp (tunit, 1))
624 return false;
291d763b 625
65c220cd 626 /* Get the offset's defining statement. */
627 offset_def = SSA_NAME_DEF_STMT (offset);
628
629 /* Try to find an expression for a proper index. This is either a
630 multiplication of some value by the element size, or just the ssa name we
631 came along with in case the element size is one. In the latter case,
632 however, we do not allow multiplications because they can be computing an
633 index into a higher-level dimension (PR 37861). */
401d1fb3 634 if (integer_onep (tunit))
1a773ec5 635 {
65c220cd 636 if (is_gimple_assign (offset_def)
637 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
638 return false;
291d763b 639
65c220cd 640 index = offset;
641 }
642 else
643 {
0de36bdb 644 /* The statement which defines OFFSET before type conversion
75a70cf9 645 must be a simple GIMPLE_ASSIGN. */
65c220cd 646 if (!is_gimple_assign (offset_def))
1a773ec5 647 return false;
291d763b 648
0de36bdb 649 /* The RHS of the statement which defines OFFSET must be a
48e1416a 650 multiplication of an object by the size of the array elements.
0de36bdb 651 This implicitly verifies that the size of the array elements
652 is constant. */
401d1fb3 653 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
654 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
655 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
656 {
657 /* The first operand to the MULT_EXPR is the desired index. */
658 index = gimple_assign_rhs1 (offset_def);
659 }
660 /* If we have idx * tunit + CST * tunit re-associate that. */
661 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
662 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
663 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
664 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
665 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
666 gimple_assign_rhs2 (offset_def),
667 tunit)) != NULL_TREE)
668 {
669 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
507b89a4 670 if (is_gimple_assign (offset_def2)
671 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
401d1fb3 672 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
673 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
674 {
675 index = fold_build2 (gimple_assign_rhs_code (offset_def),
676 TREE_TYPE (offset),
677 gimple_assign_rhs1 (offset_def2), tmp);
678 }
679 else
680 return false;
681 }
682 else
1a773ec5 683 return false;
1a773ec5 684 }
291d763b 685
686 /* Replace the pointer addition with array indexing. */
401d1fb3 687 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
688 true, GSI_SAME_STMT);
182cf5a9 689 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
690 {
691 new_rhs = unshare_expr (def_rhs);
692 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
693 }
694 else
695 {
696 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
697 unshare_expr (TREE_OPERAND (def_rhs, 0)),
698 index, integer_zero_node, NULL_TREE);
699 new_rhs = build_fold_addr_expr (new_rhs);
700 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
701 TREE_TYPE (new_rhs)))
702 {
703 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
704 NULL_TREE, true, GSI_SAME_STMT);
705 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
706 new_rhs);
707 }
708 }
709 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
75a70cf9 710 use_stmt = gsi_stmt (*use_stmt_gsi);
291d763b 711
712 /* That should have created gimple, so there is no need to
713 record information to undo the propagation. */
148aa112 714 fold_stmt_inplace (use_stmt);
715 tidy_after_forward_propagate_addr (use_stmt);
291d763b 716 return true;
717}
718
15ec875c 719/* NAME is a SSA_NAME representing DEF_RHS which is of the form
720 ADDR_EXPR <whatever>.
291d763b 721
3d5cfe81 722 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
291d763b 723 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
3d5cfe81 724 node or for recovery of array indexing from pointer arithmetic.
75a70cf9 725
6b5a5c42 726 Return true if the propagation was successful (the propagation can
727 be not totally successful, yet things may have been changed). */
291d763b 728
729static bool
75a70cf9 730forward_propagate_addr_expr_1 (tree name, tree def_rhs,
731 gimple_stmt_iterator *use_stmt_gsi,
6776dec8 732 bool single_use_p)
291d763b 733{
75a70cf9 734 tree lhs, rhs, rhs2, array_ref;
75a70cf9 735 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
736 enum tree_code rhs_code;
9e019299 737 bool res = true;
291d763b 738
971c637a 739 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
291d763b 740
75a70cf9 741 lhs = gimple_assign_lhs (use_stmt);
742 rhs_code = gimple_assign_rhs_code (use_stmt);
743 rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 744
6776dec8 745 /* Trivial cases. The use statement could be a trivial copy or a
15ec875c 746 useless conversion. Recurse to the uses of the lhs as copyprop does
971c637a 747 not copy through different variant pointers and FRE does not catch
6776dec8 748 all useless conversions. Treat the case of a single-use name and
749 a conversion to def_rhs type separately, though. */
971c637a 750 if (TREE_CODE (lhs) == SSA_NAME
75a70cf9 751 && ((rhs_code == SSA_NAME && rhs == name)
316616c9 752 || CONVERT_EXPR_CODE_P (rhs_code)))
6776dec8 753 {
316616c9 754 /* Only recurse if we don't deal with a single use or we cannot
755 do the propagation to the current statement. In particular
756 we can end up with a conversion needed for a non-invariant
757 address which we cannot do in a single statement. */
758 if (!single_use_p
759 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
bd8d8d81 760 && (!is_gimple_min_invariant (def_rhs)
761 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
762 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
763 && (TYPE_PRECISION (TREE_TYPE (lhs))
764 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
971c637a 765 return forward_propagate_addr_expr (lhs, def_rhs);
766
75a70cf9 767 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
316616c9 768 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
769 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
770 else
771 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
6776dec8 772 return true;
773 }
971c637a 774
182cf5a9 775 /* Propagate through constant pointer adjustments. */
776 if (TREE_CODE (lhs) == SSA_NAME
777 && rhs_code == POINTER_PLUS_EXPR
778 && rhs == name
779 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
780 {
781 tree new_def_rhs;
782 /* As we come here with non-invariant addresses in def_rhs we need
783 to make sure we can build a valid constant offsetted address
784 for further propagation. Simply rely on fold building that
785 and check after the fact. */
786 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
787 def_rhs,
788 fold_convert (ptr_type_node,
789 gimple_assign_rhs2 (use_stmt)));
790 if (TREE_CODE (new_def_rhs) == MEM_REF
f5d03f27 791 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
182cf5a9 792 return false;
793 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
794 TREE_TYPE (rhs));
795
796 /* Recurse. If we could propagate into all uses of lhs do not
797 bother to replace into the current use but just pretend we did. */
798 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
799 && forward_propagate_addr_expr (lhs, new_def_rhs))
800 return true;
801
802 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
803 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
804 new_def_rhs, NULL_TREE);
805 else if (is_gimple_min_invariant (new_def_rhs))
806 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
807 new_def_rhs, NULL_TREE);
808 else
809 return false;
810 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
811 update_stmt (use_stmt);
812 return true;
813 }
814
48e1416a 815 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
971c637a 816 ADDR_EXPR will not appear on the LHS. */
182cf5a9 817 lhs = gimple_assign_lhs (use_stmt);
818 while (handled_component_p (lhs))
819 lhs = TREE_OPERAND (lhs, 0);
971c637a 820
182cf5a9 821 /* Now see if the LHS node is a MEM_REF using NAME. If so,
971c637a 822 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 823 if (TREE_CODE (lhs) == MEM_REF
9e019299 824 && TREE_OPERAND (lhs, 0) == name)
971c637a 825 {
182cf5a9 826 tree def_rhs_base;
827 HOST_WIDE_INT def_rhs_offset;
828 /* If the address is invariant we can always fold it. */
829 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
830 &def_rhs_offset)))
9e019299 831 {
182cf5a9 832 double_int off = mem_ref_offset (lhs);
833 tree new_ptr;
834 off = double_int_add (off,
835 shwi_to_double_int (def_rhs_offset));
836 if (TREE_CODE (def_rhs_base) == MEM_REF)
837 {
838 off = double_int_add (off, mem_ref_offset (def_rhs_base));
839 new_ptr = TREE_OPERAND (def_rhs_base, 0);
840 }
841 else
842 new_ptr = build_fold_addr_expr (def_rhs_base);
843 TREE_OPERAND (lhs, 0) = new_ptr;
844 TREE_OPERAND (lhs, 1)
845 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
9e019299 846 tidy_after_forward_propagate_addr (use_stmt);
9e019299 847 /* Continue propagating into the RHS if this was not the only use. */
848 if (single_use_p)
849 return true;
850 }
182cf5a9 851 /* If the LHS is a plain dereference and the value type is the same as
852 that of the pointed-to type of the address we can put the
853 dereferenced address on the LHS preserving the original alias-type. */
854 else if (gimple_assign_lhs (use_stmt) == lhs
855 && useless_type_conversion_p
856 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
857 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
858 {
859 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
860 tree new_offset, new_base, saved;
861 while (handled_component_p (*def_rhs_basep))
862 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
863 saved = *def_rhs_basep;
864 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
865 {
866 new_base = TREE_OPERAND (*def_rhs_basep, 0);
867 new_offset
868 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
317e2a67 869 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 870 }
871 else
872 {
873 new_base = build_fold_addr_expr (*def_rhs_basep);
874 new_offset = TREE_OPERAND (lhs, 1);
875 }
876 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
877 new_base, new_offset);
878 gimple_assign_set_lhs (use_stmt,
879 unshare_expr (TREE_OPERAND (def_rhs, 0)));
880 *def_rhs_basep = saved;
881 tidy_after_forward_propagate_addr (use_stmt);
882 /* Continue propagating into the RHS if this was not the
883 only use. */
884 if (single_use_p)
885 return true;
886 }
9e019299 887 else
888 /* We can have a struct assignment dereferencing our name twice.
889 Note that we didn't propagate into the lhs to not falsely
890 claim we did when propagating into the rhs. */
891 res = false;
971c637a 892 }
15ec875c 893
631d5db6 894 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
895 nodes from the RHS. */
182cf5a9 896 rhs = gimple_assign_rhs1 (use_stmt);
897 if (TREE_CODE (rhs) == ADDR_EXPR)
898 rhs = TREE_OPERAND (rhs, 0);
899 while (handled_component_p (rhs))
900 rhs = TREE_OPERAND (rhs, 0);
291d763b 901
182cf5a9 902 /* Now see if the RHS node is a MEM_REF using NAME. If so,
291d763b 903 propagate the ADDR_EXPR into the use of NAME and fold the result. */
182cf5a9 904 if (TREE_CODE (rhs) == MEM_REF
905 && TREE_OPERAND (rhs, 0) == name)
291d763b 906 {
182cf5a9 907 tree def_rhs_base;
908 HOST_WIDE_INT def_rhs_offset;
909 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
910 &def_rhs_offset)))
911 {
912 double_int off = mem_ref_offset (rhs);
913 tree new_ptr;
914 off = double_int_add (off,
915 shwi_to_double_int (def_rhs_offset));
916 if (TREE_CODE (def_rhs_base) == MEM_REF)
917 {
918 off = double_int_add (off, mem_ref_offset (def_rhs_base));
919 new_ptr = TREE_OPERAND (def_rhs_base, 0);
920 }
921 else
922 new_ptr = build_fold_addr_expr (def_rhs_base);
923 TREE_OPERAND (rhs, 0) = new_ptr;
924 TREE_OPERAND (rhs, 1)
925 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
926 fold_stmt_inplace (use_stmt);
927 tidy_after_forward_propagate_addr (use_stmt);
928 return res;
929 }
930 /* If the RHS is a plain dereference and the value type is the same as
931 that of the pointed-to type of the address we can put the
932 dereferenced address on the RHS preserving the original alias-type. */
933 else if (gimple_assign_rhs1 (use_stmt) == rhs
934 && useless_type_conversion_p
935 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
936 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
937 {
938 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
939 tree new_offset, new_base, saved;
940 while (handled_component_p (*def_rhs_basep))
941 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
942 saved = *def_rhs_basep;
943 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
944 {
945 new_base = TREE_OPERAND (*def_rhs_basep, 0);
946 new_offset
947 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
317e2a67 948 TREE_OPERAND (*def_rhs_basep, 1));
182cf5a9 949 }
950 else
951 {
952 new_base = build_fold_addr_expr (*def_rhs_basep);
953 new_offset = TREE_OPERAND (rhs, 1);
954 }
955 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
956 new_base, new_offset);
957 gimple_assign_set_rhs1 (use_stmt,
958 unshare_expr (TREE_OPERAND (def_rhs, 0)));
959 *def_rhs_basep = saved;
960 fold_stmt_inplace (use_stmt);
961 tidy_after_forward_propagate_addr (use_stmt);
962 return res;
963 }
291d763b 964 }
965
971c637a 966 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
967 is nothing to do. */
75a70cf9 968 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
969 || gimple_assign_rhs1 (use_stmt) != name)
971c637a 970 return false;
971
291d763b 972 /* The remaining cases are all for turning pointer arithmetic into
973 array indexing. They only apply when we have the address of
974 element zero in an array. If that is not the case then there
975 is nothing to do. */
15ec875c 976 array_ref = TREE_OPERAND (def_rhs, 0);
182cf5a9 977 if ((TREE_CODE (array_ref) != ARRAY_REF
978 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
979 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
980 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
291d763b 981 return false;
982
75a70cf9 983 rhs2 = gimple_assign_rhs2 (use_stmt);
088cc5d5 984 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
985 of the elements in X into &x[C1 + C2/element size]. */
75a70cf9 986 if (TREE_CODE (rhs2) == INTEGER_CST)
291d763b 987 {
e60a6f7b 988 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
989 TREE_TYPE (def_rhs),
088cc5d5 990 def_rhs, rhs2);
75a70cf9 991 if (new_rhs)
291d763b 992 {
7b705d94 993 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
994 new_rhs = unshare_expr (new_rhs);
995 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
996 {
997 if (!is_gimple_min_invariant (new_rhs))
998 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
999 true, NULL_TREE,
1000 true, GSI_SAME_STMT);
1001 new_rhs = fold_convert (type, new_rhs);
1002 }
1003 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
75a70cf9 1004 use_stmt = gsi_stmt (*use_stmt_gsi);
1005 update_stmt (use_stmt);
148aa112 1006 tidy_after_forward_propagate_addr (use_stmt);
291d763b 1007 return true;
1008 }
291d763b 1009 }
1010
0de36bdb 1011 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
291d763b 1012 converting a multiplication of an index by the size of the
1013 array elements, then the result is converted into the proper
1014 type for the arithmetic. */
75a70cf9 1015 if (TREE_CODE (rhs2) == SSA_NAME
182cf5a9 1016 && (TREE_CODE (array_ref) != ARRAY_REF
1017 || integer_zerop (TREE_OPERAND (array_ref, 1)))
c019af4d 1018 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
291d763b 1019 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1020 different type than their operands. */
83a99d39 1021 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
75a70cf9 1022 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1023 use_stmt_gsi);
291d763b 1024 return false;
1025}
1026
3d5cfe81 1027/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1028
1029 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1030 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1031 node or for recovery of array indexing from pointer arithmetic.
1032 Returns true, if all uses have been propagated into. */
1033
1034static bool
15ec875c 1035forward_propagate_addr_expr (tree name, tree rhs)
3d5cfe81 1036{
75a70cf9 1037 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
3d5cfe81 1038 imm_use_iterator iter;
75a70cf9 1039 gimple use_stmt;
3d5cfe81 1040 bool all = true;
6776dec8 1041 bool single_use_p = has_single_use (name);
3d5cfe81 1042
09aca5bc 1043 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
3d5cfe81 1044 {
c96420f8 1045 bool result;
9481f629 1046 tree use_rhs;
3d5cfe81 1047
1048 /* If the use is not in a simple assignment statement, then
1049 there is nothing we can do. */
75a70cf9 1050 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
3d5cfe81 1051 {
688ff29b 1052 if (!is_gimple_debug (use_stmt))
9845d120 1053 all = false;
3d5cfe81 1054 continue;
1055 }
1056
a540e2fe 1057 /* If the use is in a deeper loop nest, then we do not want
ed40c3d0 1058 to propagate non-invariant ADDR_EXPRs into the loop as that
1059 is likely adding expression evaluations into the loop. */
1060 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1061 && !is_gimple_min_invariant (rhs))
3d5cfe81 1062 {
1063 all = false;
1064 continue;
1065 }
a540e2fe 1066
75a70cf9 1067 {
1068 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1069 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1070 single_use_p);
dd277d48 1071 /* If the use has moved to a different statement adjust
4c5fd53c 1072 the update machinery for the old statement too. */
dd277d48 1073 if (use_stmt != gsi_stmt (gsi))
1074 {
dd277d48 1075 update_stmt (use_stmt);
4c5fd53c 1076 use_stmt = gsi_stmt (gsi);
dd277d48 1077 }
4c5fd53c 1078
1079 update_stmt (use_stmt);
75a70cf9 1080 }
c96420f8 1081 all &= result;
de6ed584 1082
15ec875c 1083 /* Remove intermediate now unused copy and conversion chains. */
75a70cf9 1084 use_rhs = gimple_assign_rhs1 (use_stmt);
15ec875c 1085 if (result
75a70cf9 1086 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
7b705d94 1087 && TREE_CODE (use_rhs) == SSA_NAME
1088 && has_zero_uses (gimple_assign_lhs (use_stmt)))
15ec875c 1089 {
75a70cf9 1090 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
15ec875c 1091 release_defs (use_stmt);
75a70cf9 1092 gsi_remove (&gsi, true);
15ec875c 1093 }
3d5cfe81 1094 }
1095
628ce22b 1096 return all && has_zero_uses (name);
3d5cfe81 1097}
1098
678b2f5b 1099
1100/* Forward propagate the comparison defined in STMT like
1101 cond_1 = x CMP y to uses of the form
1102 a_1 = (T')cond_1
1103 a_1 = !cond_1
1104 a_1 = cond_1 != 0
1105 Returns true if stmt is now unused. */
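/* For example (illustrative GIMPLE, an editorial addition): with

     cond_1 = x_2 < y_3;
     a_4 = ~cond_1;          (or a_4 = cond_1 ^ 1)

   the use is rewritten to the inverted comparison

     a_4 = x_2 >= y_3;

   provided the inversion is valid (e.g. no NaNs for floating-point
   operands).  */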
1106
1107static bool
1108forward_propagate_comparison (gimple stmt)
1109{
1110 tree name = gimple_assign_lhs (stmt);
1111 gimple use_stmt;
1112 tree tmp = NULL_TREE;
e5b1e080 1113 gimple_stmt_iterator gsi;
1114 enum tree_code code;
1115 tree lhs;
678b2f5b 1116
1117 /* Don't propagate ssa names that occur in abnormal phis. */
1118 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1119 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1120 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1121 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1122 return false;
1123
1124 /* Do not un-cse comparisons. But propagate through copies. */
1125 use_stmt = get_prop_dest_stmt (name, &name);
e5b1e080 1126 if (!use_stmt
1127 || !is_gimple_assign (use_stmt))
678b2f5b 1128 return false;
1129
e5b1e080 1130 code = gimple_assign_rhs_code (use_stmt);
1131 lhs = gimple_assign_lhs (use_stmt);
1132 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1133 return false;
678b2f5b 1134
e5b1e080 1135 /* We can propagate the condition into a statement that
1136 computes the logical negation of the comparison result. */
4b5f1658 1137 if ((code == BIT_NOT_EXPR
1138 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1139 || (code == BIT_XOR_EXPR
1140 && integer_onep (gimple_assign_rhs2 (use_stmt))))
e5b1e080 1141 {
1142 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1143 bool nans = HONOR_NANS (TYPE_MODE (type));
1144 enum tree_code inv_code;
1145 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1146 if (inv_code == ERROR_MARK)
678b2f5b 1147 return false;
1148
e5b1e080 1149 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1150 gimple_assign_rhs2 (stmt));
1151 }
1152 else
1153 return false;
678b2f5b 1154
e5b1e080 1155 gsi = gsi_for_stmt (use_stmt);
1156 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1157 use_stmt = gsi_stmt (gsi);
1158 update_stmt (use_stmt);
678b2f5b 1159
e5b1e080 1160 if (dump_file && (dump_flags & TDF_DETAILS))
1161 {
1162 fprintf (dump_file, " Replaced '");
1163 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1164 fprintf (dump_file, "' with '");
1165 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1166 fprintf (dump_file, "'\n");
678b2f5b 1167 }
1168
e5b1e080 1169 /* Remove defining statements. */
1170 return remove_prop_source_from_use (name);
678b2f5b 1171}
1172
1173
3a938499 1174/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1175 If so, we can change STMT into lhs = y which can later be copy
48e1416a 1176 propagated. Similarly for negation.
3a938499 1177
48e1416a 1178 This could trivially be formulated as a forward propagation
3a938499 1179 to immediate uses. However, we already had an implementation
1180 from DOM which used backward propagation via the use-def links.
1181
1182 It turns out that backward propagation is actually faster as
1183 there's less work to do for each NOT/NEG expression we find.
1184 Backwards propagation needs to look at the statement in a single
1185 backlink. Forward propagation needs to look at potentially more
678b2f5b 1186 than one forward link.
3a938499 1187
678b2f5b 1188 Returns true when the statement was changed. */
1189
1190static bool
75a70cf9 1191simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
3a938499 1192{
75a70cf9 1193 gimple stmt = gsi_stmt (*gsi_p);
1194 tree rhs = gimple_assign_rhs1 (stmt);
1195 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
3a938499 1196
1197 /* See if the RHS_DEF_STMT has the same form as our statement. */
75a70cf9 1198 if (is_gimple_assign (rhs_def_stmt)
1199 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
3a938499 1200 {
75a70cf9 1201 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
3a938499 1202
1203 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1204 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1205 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1206 {
75a70cf9 1207 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1208 stmt = gsi_stmt (*gsi_p);
3a938499 1209 update_stmt (stmt);
678b2f5b 1210 return true;
3a938499 1211 }
1212 }
678b2f5b 1213
1214 return false;
3a938499 1215}
3d5cfe81 1216
b5860aba 1217/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1218 the condition which we may be able to optimize better. */
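/* For example (illustrative GIMPLE, an editorial addition): if s_1 has
   type 'short' and we have

     D.2_2 = (int) s_1;
     switch (D.2_2) <...>

   the widening cast preserves every value of s_1, so the switch index is
   replaced by s_1 directly.  */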
1219
678b2f5b 1220static bool
75a70cf9 1221simplify_gimple_switch (gimple stmt)
b5860aba 1222{
75a70cf9 1223 tree cond = gimple_switch_index (stmt);
b5860aba 1224 tree def, to, ti;
75a70cf9 1225 gimple def_stmt;
b5860aba 1226
1227 /* The optimization that we really care about is removing unnecessary
1228 casts. That will let us do much better in propagating the inferred
1229 constant at the switch target. */
1230 if (TREE_CODE (cond) == SSA_NAME)
1231 {
75a70cf9 1232 def_stmt = SSA_NAME_DEF_STMT (cond);
1233 if (is_gimple_assign (def_stmt))
b5860aba 1234 {
75a70cf9 1235 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
b5860aba 1236 {
1237 int need_precision;
1238 bool fail;
1239
75a70cf9 1240 def = gimple_assign_rhs1 (def_stmt);
b5860aba 1241
b5860aba 1242 /* ??? Why was Jeff testing this? We are gimple... */
1b4345f7 1243 gcc_checking_assert (is_gimple_val (def));
b5860aba 1244
1245 to = TREE_TYPE (cond);
1246 ti = TREE_TYPE (def);
1247
1248 /* If we have an extension that preserves value, then we
1249 can copy the source value into the switch. */
1250
1251 need_precision = TYPE_PRECISION (ti);
1252 fail = false;
c5237b8b 1253 if (! INTEGRAL_TYPE_P (ti))
1254 fail = true;
1255 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
b5860aba 1256 fail = true;
1257 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1258 need_precision += 1;
1259 if (TYPE_PRECISION (to) < need_precision)
1260 fail = true;
1261
1262 if (!fail)
1263 {
75a70cf9 1264 gimple_switch_set_index (stmt, def);
b5860aba 1265 update_stmt (stmt);
678b2f5b 1266 return true;
b5860aba 1267 }
1268 }
1269 }
1270 }
678b2f5b 1271
1272 return false;
b5860aba 1273}
1274
27f931ff 1275/* For pointers p2 and p1 return p2 - p1 if the
1276 difference is known and constant, otherwise return NULL. */
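/* For example (illustrative, an editorial addition): with 'char buf[8];',
   P1 being '&buf[0]' and P2 an SSA name defined by 'p2_1 = p1_2 + 4;'
   where p1_2 also points to &buf[0], both chains reach the common base
   'buf' and the function returns the constant 4.  */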
1277
1278static tree
1279constant_pointer_difference (tree p1, tree p2)
1280{
1281 int i, j;
1282#define CPD_ITERATIONS 5
1283 tree exps[2][CPD_ITERATIONS];
1284 tree offs[2][CPD_ITERATIONS];
1285 int cnt[2];
1286
1287 for (i = 0; i < 2; i++)
1288 {
1289 tree p = i ? p1 : p2;
1290 tree off = size_zero_node;
1291 gimple stmt;
1292 enum tree_code code;
1293
1294 /* For each of p1 and p2 we need to iterate at least
1295 twice, to handle ADDR_EXPR directly in p1/p2,
1296 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1297 on definition's stmt RHS. Iterate a few extra times. */
1298 j = 0;
1299 do
1300 {
1301 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1302 break;
1303 if (TREE_CODE (p) == ADDR_EXPR)
1304 {
1305 tree q = TREE_OPERAND (p, 0);
1306 HOST_WIDE_INT offset;
1307 tree base = get_addr_base_and_unit_offset (q, &offset);
1308 if (base)
1309 {
1310 q = base;
1311 if (offset)
1312 off = size_binop (PLUS_EXPR, off, size_int (offset));
1313 }
1314 if (TREE_CODE (q) == MEM_REF
1315 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1316 {
1317 p = TREE_OPERAND (q, 0);
1318 off = size_binop (PLUS_EXPR, off,
1319 double_int_to_tree (sizetype,
1320 mem_ref_offset (q)));
1321 }
1322 else
1323 {
1324 exps[i][j] = q;
1325 offs[i][j++] = off;
1326 break;
1327 }
1328 }
1329 if (TREE_CODE (p) != SSA_NAME)
1330 break;
1331 exps[i][j] = p;
1332 offs[i][j++] = off;
1333 if (j == CPD_ITERATIONS)
1334 break;
1335 stmt = SSA_NAME_DEF_STMT (p);
1336 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1337 break;
1338 code = gimple_assign_rhs_code (stmt);
1339 if (code == POINTER_PLUS_EXPR)
1340 {
1341 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1342 break;
1343 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1344 p = gimple_assign_rhs1 (stmt);
1345 }
1346 else if (code == ADDR_EXPR || code == NOP_EXPR)
1347 p = gimple_assign_rhs1 (stmt);
1348 else
1349 break;
1350 }
1351 while (1);
1352 cnt[i] = j;
1353 }
1354
1355 for (i = 0; i < cnt[0]; i++)
1356 for (j = 0; j < cnt[1]; j++)
1357 if (exps[0][i] == exps[1][j])
1358 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1359
1360 return NULL_TREE;
1361}
1362
1363/* *GSI_P is a GIMPLE_CALL to a builtin function.
1364 Optimize
1365 memcpy (p, "abcd", 4);
1366 memset (p + 4, ' ', 3);
1367 into
1368 memcpy (p, "abcd ", 7);
1369 call if the latter can be stored by pieces during expansion. */
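/* A second form handled below (illustrative, an editorial addition): a
   length-1 memcpy that was already optimized into a plain character
   store, as in

     *p_1 = 97;              (i.e. 'a')
     memset (p_1 + 1, 98, 3);

   is merged into a single 'memcpy (p_1, "abbb", 4);' under the same
   store-by-pieces constraints.  */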
1370
1371static bool
1372simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1373{
1374 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1375 tree vuse = gimple_vuse (stmt2);
1376 if (vuse == NULL)
1377 return false;
1378 stmt1 = SSA_NAME_DEF_STMT (vuse);
1379
1380 switch (DECL_FUNCTION_CODE (callee2))
1381 {
1382 case BUILT_IN_MEMSET:
1383 if (gimple_call_num_args (stmt2) != 3
1384 || gimple_call_lhs (stmt2)
1385 || CHAR_BIT != 8
1386 || BITS_PER_UNIT != 8)
1387 break;
1388 else
1389 {
1390 tree callee1;
1391 tree ptr1, src1, str1, off1, len1, lhs1;
1392 tree ptr2 = gimple_call_arg (stmt2, 0);
1393 tree val2 = gimple_call_arg (stmt2, 1);
1394 tree len2 = gimple_call_arg (stmt2, 2);
1395 tree diff, vdef, new_str_cst;
1396 gimple use_stmt;
1397 unsigned int ptr1_align;
1398 unsigned HOST_WIDE_INT src_len;
1399 char *src_buf;
1400 use_operand_p use_p;
1401
1402 if (!host_integerp (val2, 0)
1403 || !host_integerp (len2, 1))
1404 break;
1405 if (is_gimple_call (stmt1))
1406 {
1407 /* If first stmt is a call, it needs to be memcpy
1408 or mempcpy, with string literal as second argument and
1409 constant length. */
1410 callee1 = gimple_call_fndecl (stmt1);
1411 if (callee1 == NULL_TREE
1412 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1413 || gimple_call_num_args (stmt1) != 3)
1414 break;
1415 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1416 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1417 break;
1418 ptr1 = gimple_call_arg (stmt1, 0);
1419 src1 = gimple_call_arg (stmt1, 1);
1420 len1 = gimple_call_arg (stmt1, 2);
1421 lhs1 = gimple_call_lhs (stmt1);
1422 if (!host_integerp (len1, 1))
1423 break;
1424 str1 = string_constant (src1, &off1);
1425 if (str1 == NULL_TREE)
1426 break;
1427 if (!host_integerp (off1, 1)
1428 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1429 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1430 - tree_low_cst (off1, 1)) > 0
1431 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1432 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1433 != TYPE_MODE (char_type_node))
1434 break;
1435 }
1436 else if (gimple_assign_single_p (stmt1))
1437 {
1438 /* Otherwise look for length 1 memcpy optimized into
1439 assignment. */
1440 ptr1 = gimple_assign_lhs (stmt1);
1441 src1 = gimple_assign_rhs1 (stmt1);
1442 if (TREE_CODE (ptr1) != MEM_REF
1443 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1444 || !host_integerp (src1, 0))
1445 break;
1446 ptr1 = build_fold_addr_expr (ptr1);
1447 callee1 = NULL_TREE;
1448 len1 = size_one_node;
1449 lhs1 = NULL_TREE;
1450 off1 = size_zero_node;
1451 str1 = NULL_TREE;
1452 }
1453 else
1454 break;
1455
1456 diff = constant_pointer_difference (ptr1, ptr2);
1457 if (diff == NULL && lhs1 != NULL)
1458 {
1459 diff = constant_pointer_difference (lhs1, ptr2);
1460 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1461 && diff != NULL)
1462 diff = size_binop (PLUS_EXPR, diff,
1463 fold_convert (sizetype, len1));
1464 }
1465 /* If the difference between the second and first destination pointer
1466 is not constant, or is bigger than memcpy length, bail out. */
1467 if (diff == NULL
1468 || !host_integerp (diff, 1)
1469 || tree_int_cst_lt (len1, diff))
1470 break;
1471
1472 /* Use maximum of difference plus memset length and memcpy length
1473 as the new memcpy length, if it is too big, bail out. */
1474 src_len = tree_low_cst (diff, 1);
1475 src_len += tree_low_cst (len2, 1);
1476 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1477 src_len = tree_low_cst (len1, 1);
1478 if (src_len > 1024)
1479 break;
1480
1481 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1482 with a bigger length will return a different result. */
1483 if (lhs1 != NULL_TREE
1484 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1485 && (TREE_CODE (lhs1) != SSA_NAME
1486 || !single_imm_use (lhs1, &use_p, &use_stmt)
1487 || use_stmt != stmt2))
1488 break;
1489
1490 /* If anything reads memory in between memcpy and memset
1491 call, the modified memcpy call might change it. */
1492 vdef = gimple_vdef (stmt1);
1493 if (vdef != NULL
1494 && (!single_imm_use (vdef, &use_p, &use_stmt)
1495 || use_stmt != stmt2))
1496 break;
1497
1498 ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
1499 /* Construct the new source string literal. */
1500 src_buf = XALLOCAVEC (char, src_len + 1);
1501 if (callee1)
1502 memcpy (src_buf,
1503 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1504 tree_low_cst (len1, 1));
1505 else
1506 src_buf[0] = tree_low_cst (src1, 0);
1507 memset (src_buf + tree_low_cst (diff, 1),
1508 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1509 src_buf[src_len] = '\0';
1510 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1511 handle embedded '\0's. */
1512 if (strlen (src_buf) != src_len)
1513 break;
1514 rtl_profile_for_bb (gimple_bb (stmt2));
1515 /* If the new memcpy wouldn't be emitted by storing the literal
1516 by pieces, this optimization might enlarge .rodata too much,
1517 as commonly used string literals couldn't be shared any
1518 longer. */
1519 if (!can_store_by_pieces (src_len,
1520 builtin_strncpy_read_str,
1521 src_buf, ptr1_align, false))
1522 break;
1523
1524 new_str_cst = build_string_literal (src_len, src_buf);
1525 if (callee1)
1526 {
1527 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1528 memset call. */
1529 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1530 gimple_call_set_lhs (stmt1, NULL_TREE);
1531 gimple_call_set_arg (stmt1, 1, new_str_cst);
1532 gimple_call_set_arg (stmt1, 2,
1533 build_int_cst (TREE_TYPE (len1), src_len));
1534 update_stmt (stmt1);
1535 unlink_stmt_vdef (stmt2);
1536 gsi_remove (gsi_p, true);
1537 release_defs (stmt2);
1538 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1539 release_ssa_name (lhs1);
1540 return true;
1541 }
1542 else
1543 {
1544 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1545 assignment, remove STMT1 and change memset call into
1546 memcpy call. */
1547 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1548
7ecb2e7c 1549 if (!is_gimple_val (ptr1))
1550 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1551 true, GSI_SAME_STMT);
27f931ff 1552 gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
1553 gimple_call_set_arg (stmt2, 0, ptr1);
1554 gimple_call_set_arg (stmt2, 1, new_str_cst);
1555 gimple_call_set_arg (stmt2, 2,
1556 build_int_cst (TREE_TYPE (len2), src_len));
1557 unlink_stmt_vdef (stmt1);
1558 gsi_remove (&gsi, true);
1559 release_defs (stmt1);
1560 update_stmt (stmt2);
1561 return false;
1562 }
1563 }
1564 break;
1565 default:
1566 break;
1567 }
1568 return false;
1569}
1570
41913fa9 1571/* Checks if the expression NAME has a type of one-bit precision, or is a
1572 known truth-valued expression. */
1573static bool
1574truth_valued_ssa_name (tree name)
1575{
1576 gimple def;
1577 tree type = TREE_TYPE (name);
1578
1579 if (!INTEGRAL_TYPE_P (type))
1580 return false;
1581 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1582 necessarily one and so ~X is not equal to !X. */
1583 if (TYPE_PRECISION (type) == 1)
1584 return true;
1585 def = SSA_NAME_DEF_STMT (name);
1586 if (is_gimple_assign (def))
1587 return truth_value_p (gimple_assign_rhs_code (def));
1588 return false;
1589}
1590
1591/* Helper routine for the simplify_bitwise_binary_1 function.
 1592 Return, for the SSA name NAME, the expression X if NAME meets the
 1593 condition NAME = !X. Otherwise return NULL_TREE.
 1594 Detected patterns for NAME = !X are:
 1595 !X and X == 0 for X with integral type.
 1596 X ^ 1, X != 1, or ~X for X with integral type of precision one. */
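/* For example (an illustrative GIMPLE sketch; the SSA names are made up):
   given  t_1 = x_2 == 0;  this function returns x_2 for t_1, and
   likewise for  t_1 = x_2 ^ 1;  when x_2 has one-bit precision. */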
1597static tree
1598lookup_logical_inverted_value (tree name)
1599{
1600 tree op1, op2;
1601 enum tree_code code;
1602 gimple def;
1603
1604 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1605 return. */
1606 if (TREE_CODE (name) != SSA_NAME
1607 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1608 return NULL_TREE;
1609 def = SSA_NAME_DEF_STMT (name);
1610 if (!is_gimple_assign (def))
1611 return NULL_TREE;
1612
1613 code = gimple_assign_rhs_code (def);
1614 op1 = gimple_assign_rhs1 (def);
1615 op2 = NULL_TREE;
1616
1617 /* For an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR operation get the second
8f4a7578 1618 operand; codes not handled by the switch below yield NULL_TREE. */
41913fa9 1619 if (code == EQ_EXPR || code == NE_EXPR
1620 || code == BIT_XOR_EXPR)
1621 op2 = gimple_assign_rhs2 (def);
1622
1623 switch (code)
1624 {
41913fa9 1625 case BIT_NOT_EXPR:
1626 if (truth_valued_ssa_name (name))
1627 return op1;
1628 break;
1629 case EQ_EXPR:
1630 /* Check if we have X == 0 and X has an integral type. */
1631 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1632 break;
1633 if (integer_zerop (op2))
1634 return op1;
1635 break;
1636 case NE_EXPR:
1637 /* Check if we have X != 1 and X is truth-valued. */
1638 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1639 break;
1640 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1641 return op1;
1642 break;
1643 case BIT_XOR_EXPR:
1644 /* Check if we have X ^ 1 and X is truth valued. */
1645 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1646 return op1;
1647 break;
1648 default:
1649 break;
1650 }
1651
1652 return NULL_TREE;
1653}
1654
1655/* Optimize ARG1 CODE ARG2 to a constant for the bitwise binary
 1656 operation CODE, if one operand has the logically inverted
1657 value of the other. */
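/* For example (illustrative): with  t_1 = x_2 == 0;  the expression
   x_2 & t_1 simplifies to 0, and, if x_2 is truth-valued, x_2 | t_1
   and x_2 ^ t_1 simplify to 1. */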
1658static tree
1659simplify_bitwise_binary_1 (enum tree_code code, tree type,
1660 tree arg1, tree arg2)
1661{
1662 tree anot;
1663
1664 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1665 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1666 && code != BIT_XOR_EXPR)
1667 return NULL_TREE;
1668
1669 /* First check if operands ARG1 and ARG2 are equal. If so
1670 return NULL_TREE as this optimization is handled by fold_stmt. */
1671 if (arg1 == arg2)
1672 return NULL_TREE;
1673 /* See if we have in arguments logical-not patterns. */
1674 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1675 || anot != arg2)
1676 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1677 || anot != arg1))
1678 return NULL_TREE;
1679
1680 /* X & !X -> 0. */
1681 if (code == BIT_AND_EXPR)
1682 return fold_convert (type, integer_zero_node);
1683 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1684 if (truth_valued_ssa_name (anot))
1685 return fold_convert (type, integer_one_node);
1686
1687 /* ??? Otherwise the result is (X != 0 ? X : 1); not handled. */
1688 return NULL_TREE;
1689}
1690
300da094 1691/* Simplify bitwise binary operations.
1692 Return true if a transformation was applied, otherwise return false. */
1c4607fd 1693
300da094 1694static bool
1695simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1c4607fd 1696{
300da094 1697 gimple stmt = gsi_stmt (*gsi);
1c4607fd 1698 tree arg1 = gimple_assign_rhs1 (stmt);
1699 tree arg2 = gimple_assign_rhs2 (stmt);
300da094 1700 enum tree_code code = gimple_assign_rhs_code (stmt);
1701 tree res;
26f54bd0 1702 gimple def1 = NULL, def2 = NULL;
1703 tree def1_arg1, def2_arg1;
1704 enum tree_code def1_code, def2_code;
1c4607fd 1705
26f54bd0 1706 def1_code = TREE_CODE (arg1);
1707 def1_arg1 = arg1;
1708 if (TREE_CODE (arg1) == SSA_NAME)
1709 {
1710 def1 = SSA_NAME_DEF_STMT (arg1);
1711 if (is_gimple_assign (def1))
1712 {
1713 def1_code = gimple_assign_rhs_code (def1);
1714 def1_arg1 = gimple_assign_rhs1 (def1);
1715 }
1716 }
1717
1718 def2_code = TREE_CODE (arg2);
1719 def2_arg1 = arg2;
1720 if (TREE_CODE (arg2) == SSA_NAME)
1721 {
1722 def2 = SSA_NAME_DEF_STMT (arg2);
1723 if (is_gimple_assign (def2))
1724 {
1725 def2_code = gimple_assign_rhs_code (def2);
1726 def2_arg1 = gimple_assign_rhs1 (def2);
1727 }
1728 }
1729
25ce0d90 1730 /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST)). */
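  /* For instance (illustrative, made-up names): for unsigned char c,
       t_1 = (int) c_2;  r_3 = t_1 & 15;
     is rewritten so the AND is done in unsigned char:
       t_4 = c_2 & 15;  r_3 = (int) t_4;  */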
1731 if (TREE_CODE (arg2) == INTEGER_CST
1732 && CONVERT_EXPR_CODE_P (def1_code)
105fc895 1733 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
25ce0d90 1734 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1735 {
1736 gimple newop;
1737 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1738 newop =
1739 gimple_build_assign_with_ops (code, tem, def1_arg1,
1740 fold_convert_loc (gimple_location (stmt),
1741 TREE_TYPE (def1_arg1),
1742 arg2));
1743 tem = make_ssa_name (tem, newop);
1744 gimple_assign_set_lhs (newop, tem);
4b5f1658 1745 gimple_set_location (newop, gimple_location (stmt));
25ce0d90 1746 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1747 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1748 tem, NULL_TREE, NULL_TREE);
1749 update_stmt (gsi_stmt (*gsi));
1750 return true;
1751 }
1752
300da094 1753 /* For bitwise binary operations apply operand conversions to the
1754 binary operation result instead of to the operands. This allows
1755 combining successive conversions and bitwise binary operations. */
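  /* For instance (illustrative): for unsigned char a and b,
       x_1 = (int) a_2;  y_3 = (int) b_4;  r_5 = x_1 & y_3;
     becomes
       t_6 = a_2 & b_4;  r_5 = (int) t_6;  */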
26f54bd0 1756 if (CONVERT_EXPR_CODE_P (def1_code)
1757 && CONVERT_EXPR_CODE_P (def2_code)
1758 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
25ce0d90 1759 /* Make sure that the conversion widens the operands or keeps them
 1760 at the same precision, or that it changes the operation to a
 1761 bitfield precision. */
26f54bd0 1762 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
25ce0d90 1763 <= TYPE_PRECISION (TREE_TYPE (arg1)))
26f54bd0 1764 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1765 != MODE_INT)
1766 || (TYPE_PRECISION (TREE_TYPE (arg1))
1767 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1c4607fd 1768 {
26f54bd0 1769 gimple newop;
1770 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1771 NULL);
1772 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1773 tem = make_ssa_name (tem, newop);
1774 gimple_assign_set_lhs (newop, tem);
4b5f1658 1775 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 1776 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1777 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1778 tem, NULL_TREE, NULL_TREE);
1779 update_stmt (gsi_stmt (*gsi));
1780 return true;
1781 }
1782
1783 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
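  /* For instance (illustrative):  (a | 0xf0) & 0x3c  becomes
     (a & 0x3c) | 0x30,  and  (a | 0xf0) & 0x0f  becomes just  a & 0x0f. */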
1784 if (code == BIT_AND_EXPR
1785 && def1_code == BIT_IOR_EXPR
1786 && TREE_CODE (arg2) == INTEGER_CST
1787 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1788 {
1789 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1790 arg2, gimple_assign_rhs2 (def1));
1791 tree tem;
1792 gimple newop;
1793 if (integer_zerop (cst))
300da094 1794 {
26f54bd0 1795 gimple_assign_set_rhs1 (stmt, def1_arg1);
1796 update_stmt (stmt);
1797 return true;
300da094 1798 }
26f54bd0 1799 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1800 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1801 tem, def1_arg1, arg2);
1802 tem = make_ssa_name (tem, newop);
1803 gimple_assign_set_lhs (newop, tem);
4b5f1658 1804 gimple_set_location (newop, gimple_location (stmt));
26f54bd0 1805 /* Make sure to re-process the new stmt as it's walking upwards. */
1806 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1807 gimple_assign_set_rhs1 (stmt, tem);
1808 gimple_assign_set_rhs2 (stmt, cst);
1809 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1810 update_stmt (stmt);
1811 return true;
1812 }
1813
1814 /* Combine successive equal operations with constants. */
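  /* For instance (illustrative):  (a & 0xf0) & 0x3c  becomes  a & 0x30,
     and  (a ^ 1) ^ 3  becomes  a ^ 2. */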
1815 if ((code == BIT_AND_EXPR
1816 || code == BIT_IOR_EXPR
1817 || code == BIT_XOR_EXPR)
1818 && def1_code == code
1819 && TREE_CODE (arg2) == INTEGER_CST
1820 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1821 {
1822 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1823 arg2, gimple_assign_rhs2 (def1));
1824 gimple_assign_set_rhs1 (stmt, def1_arg1);
1825 gimple_assign_set_rhs2 (stmt, cst);
1826 update_stmt (stmt);
1827 return true;
1c4607fd 1828 }
300da094 1829
8a5f403f 1830 /* Canonicalize X ^ ~0 to ~X. */
1831 if (code == BIT_XOR_EXPR
1832 && TREE_CODE (arg2) == INTEGER_CST
1833 && integer_all_onesp (arg2))
1834 {
1835 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1836 gcc_assert (gsi_stmt (*gsi) == stmt);
1837 update_stmt (stmt);
1838 return true;
1839 }
1840
41913fa9 1841 /* Try simple folding for X op !X, and X op X. */
1842 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1843 if (res != NULL_TREE)
1844 {
1845 gimple_assign_set_rhs_from_tree (gsi, res);
1846 update_stmt (gsi_stmt (*gsi));
1847 return true;
1848 }
1849
300da094 1850 return false;
1c4607fd 1851}
1852
ca3c9092 1853
1854/* Perform re-associations of the plus or minus statement STMT that are
b69d1cb6 1855 always permitted. Returns true if the CFG was changed. */
ca3c9092 1856
b69d1cb6 1857static bool
ca3c9092 1858associate_plusminus (gimple stmt)
1859{
1860 tree rhs1 = gimple_assign_rhs1 (stmt);
1861 tree rhs2 = gimple_assign_rhs2 (stmt);
1862 enum tree_code code = gimple_assign_rhs_code (stmt);
1863 gimple_stmt_iterator gsi;
1864 bool changed;
1865
1866 /* We can't reassociate at all for saturating types. */
1867 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
b69d1cb6 1868 return false;
ca3c9092 1869
1870 /* First contract negates. */
1871 do
1872 {
1873 changed = false;
1874
1875 /* A +- (-B) -> A -+ B. */
1876 if (TREE_CODE (rhs2) == SSA_NAME)
1877 {
1878 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1879 if (is_gimple_assign (def_stmt)
32cdcc42 1880 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1881 && can_propagate_from (def_stmt))
ca3c9092 1882 {
1883 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1884 gimple_assign_set_rhs_code (stmt, code);
1885 rhs2 = gimple_assign_rhs1 (def_stmt);
1886 gimple_assign_set_rhs2 (stmt, rhs2);
1887 gimple_set_modified (stmt, true);
1888 changed = true;
1889 }
1890 }
1891
1892 /* (-A) + B -> B - A. */
1893 if (TREE_CODE (rhs1) == SSA_NAME
1894 && code == PLUS_EXPR)
1895 {
1896 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1897 if (is_gimple_assign (def_stmt)
32cdcc42 1898 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1899 && can_propagate_from (def_stmt))
ca3c9092 1900 {
1901 code = MINUS_EXPR;
1902 gimple_assign_set_rhs_code (stmt, code);
1903 rhs1 = rhs2;
1904 gimple_assign_set_rhs1 (stmt, rhs1);
1905 rhs2 = gimple_assign_rhs1 (def_stmt);
1906 gimple_assign_set_rhs2 (stmt, rhs2);
1907 gimple_set_modified (stmt, true);
1908 changed = true;
1909 }
1910 }
1911 }
1912 while (changed);
1913
1914 /* We can't reassociate floating-point or fixed-point plus or minus
1915 because of saturation to +-Inf. */
1916 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1917 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1918 goto out;
1919
1920 /* Second match patterns that allow contracting a plus-minus pair
1921 irrespective of overflow issues.
1922
1923 (A +- B) - A -> +- B
1924 (A +- B) -+ B -> A
1925 (CST +- A) +- CST -> CST +- A
1926 (A + CST) +- CST -> A + CST
1927 ~A + A -> -1
1928 ~A + 1 -> -A
1929 A - (A +- B) -> -+ B
1930 A +- (B +- A) -> +- B
1931 CST +- (CST +- A) -> CST +- A
1932 CST +- (A +- CST) -> CST +- A
1933 A + ~A -> -1
1934
1935 via commuting the addition and contracting operations to zero
1936 by reassociation. */
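  /* For instance (an illustrative GIMPLE sketch with integer operands;
     the SSA names are made up):  given
       t_1 = a_2 + b_3;
       r_4 = t_1 - a_2;
     the second statement is rewritten to  r_4 = b_3;  and given
       t_1 = ~a_2;
       r_4 = t_1 + a_2;
     it becomes  r_4 = -1;  */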
1937
1938 gsi = gsi_for_stmt (stmt);
1939 if (TREE_CODE (rhs1) == SSA_NAME)
1940 {
1941 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
32cdcc42 1942 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 1943 {
1944 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1945 if (def_code == PLUS_EXPR
1946 || def_code == MINUS_EXPR)
1947 {
1948 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1949 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1950 if (operand_equal_p (def_rhs1, rhs2, 0)
1951 && code == MINUS_EXPR)
1952 {
1953 /* (A +- B) - A -> +- B. */
1954 code = ((def_code == PLUS_EXPR)
1955 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1956 rhs1 = def_rhs2;
1957 rhs2 = NULL_TREE;
1958 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1959 gcc_assert (gsi_stmt (gsi) == stmt);
1960 gimple_set_modified (stmt, true);
1961 }
1962 else if (operand_equal_p (def_rhs2, rhs2, 0)
1963 && code != def_code)
1964 {
1965 /* (A +- B) -+ B -> A. */
1966 code = TREE_CODE (def_rhs1);
1967 rhs1 = def_rhs1;
1968 rhs2 = NULL_TREE;
1969 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1970 gcc_assert (gsi_stmt (gsi) == stmt);
1971 gimple_set_modified (stmt, true);
1972 }
1973 else if (TREE_CODE (rhs2) == INTEGER_CST
1974 && TREE_CODE (def_rhs1) == INTEGER_CST)
1975 {
1976 /* (CST +- A) +- CST -> CST +- A. */
1977 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1978 def_rhs1, rhs2);
1979 if (cst && !TREE_OVERFLOW (cst))
1980 {
1981 code = def_code;
1982 gimple_assign_set_rhs_code (stmt, code);
1983 rhs1 = cst;
1984 gimple_assign_set_rhs1 (stmt, rhs1);
1985 rhs2 = def_rhs2;
1986 gimple_assign_set_rhs2 (stmt, rhs2);
1987 gimple_set_modified (stmt, true);
1988 }
1989 }
1990 else if (TREE_CODE (rhs2) == INTEGER_CST
1991 && TREE_CODE (def_rhs2) == INTEGER_CST
1992 && def_code == PLUS_EXPR)
1993 {
1994 /* (A + CST) +- CST -> A + CST. */
1995 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1996 def_rhs2, rhs2);
1997 if (cst && !TREE_OVERFLOW (cst))
1998 {
1999 code = PLUS_EXPR;
2000 gimple_assign_set_rhs_code (stmt, code);
2001 rhs1 = def_rhs1;
2002 gimple_assign_set_rhs1 (stmt, rhs1);
2003 rhs2 = cst;
2004 gimple_assign_set_rhs2 (stmt, rhs2);
2005 gimple_set_modified (stmt, true);
2006 }
2007 }
2008 }
2009 else if (def_code == BIT_NOT_EXPR
2010 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2011 {
2012 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2013 if (code == PLUS_EXPR
2014 && operand_equal_p (def_rhs1, rhs2, 0))
2015 {
2016 /* ~A + A -> -1. */
2017 code = INTEGER_CST;
19d861b9 2018 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
ca3c9092 2019 rhs2 = NULL_TREE;
2020 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2021 gcc_assert (gsi_stmt (gsi) == stmt);
2022 gimple_set_modified (stmt, true);
2023 }
2024 else if (code == PLUS_EXPR
2025 && integer_onep (rhs2))
2026 {
2027 /* ~A + 1 -> -A. */
2028 code = NEGATE_EXPR;
2029 rhs1 = def_rhs1;
2030 rhs2 = NULL_TREE;
2031 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2032 gcc_assert (gsi_stmt (gsi) == stmt);
2033 gimple_set_modified (stmt, true);
2034 }
2035 }
2036 }
2037 }
2038
2039 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2040 {
2041 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
32cdcc42 2042 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
ca3c9092 2043 {
2044 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2045 if (def_code == PLUS_EXPR
2046 || def_code == MINUS_EXPR)
2047 {
2048 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2049 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2050 if (operand_equal_p (def_rhs1, rhs1, 0)
2051 && code == MINUS_EXPR)
2052 {
2053 /* A - (A +- B) -> -+ B. */
2054 code = ((def_code == PLUS_EXPR)
2055 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2056 rhs1 = def_rhs2;
2057 rhs2 = NULL_TREE;
2058 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2059 gcc_assert (gsi_stmt (gsi) == stmt);
2060 gimple_set_modified (stmt, true);
2061 }
2062 else if (operand_equal_p (def_rhs2, rhs1, 0)
2063 && code != def_code)
2064 {
2065 /* A +- (B +- A) -> +- B. */
2066 code = ((code == PLUS_EXPR)
2067 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2068 rhs1 = def_rhs1;
2069 rhs2 = NULL_TREE;
2070 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2071 gcc_assert (gsi_stmt (gsi) == stmt);
2072 gimple_set_modified (stmt, true);
2073 }
2074 else if (TREE_CODE (rhs1) == INTEGER_CST
2075 && TREE_CODE (def_rhs1) == INTEGER_CST)
2076 {
2077 /* CST +- (CST +- A) -> CST +- A. */
2078 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2079 rhs1, def_rhs1);
2080 if (cst && !TREE_OVERFLOW (cst))
2081 {
2082 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2083 gimple_assign_set_rhs_code (stmt, code);
2084 rhs1 = cst;
2085 gimple_assign_set_rhs1 (stmt, rhs1);
2086 rhs2 = def_rhs2;
2087 gimple_assign_set_rhs2 (stmt, rhs2);
2088 gimple_set_modified (stmt, true);
2089 }
2090 }
2091 else if (TREE_CODE (rhs1) == INTEGER_CST
2092 && TREE_CODE (def_rhs2) == INTEGER_CST)
2093 {
2094 /* CST +- (A +- CST) -> CST +- A. */
2095 tree cst = fold_binary (def_code == code
2096 ? PLUS_EXPR : MINUS_EXPR,
2097 TREE_TYPE (rhs2),
2098 rhs1, def_rhs2);
2099 if (cst && !TREE_OVERFLOW (cst))
2100 {
2101 rhs1 = cst;
2102 gimple_assign_set_rhs1 (stmt, rhs1);
2103 rhs2 = def_rhs1;
2104 gimple_assign_set_rhs2 (stmt, rhs2);
2105 gimple_set_modified (stmt, true);
2106 }
2107 }
2108 }
2109 else if (def_code == BIT_NOT_EXPR
2110 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2111 {
2112 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2113 if (code == PLUS_EXPR
2114 && operand_equal_p (def_rhs1, rhs1, 0))
2115 {
2116 /* A + ~A -> -1. */
2117 code = INTEGER_CST;
19d861b9 2118 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
ca3c9092 2119 rhs2 = NULL_TREE;
2120 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2121 gcc_assert (gsi_stmt (gsi) == stmt);
2122 gimple_set_modified (stmt, true);
2123 }
2124 }
2125 }
2126 }
2127
2128out:
2129 if (gimple_modified_p (stmt))
2130 {
2131 fold_stmt_inplace (stmt);
2132 update_stmt (stmt);
b69d1cb6 2133 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2134 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2135 return true;
ca3c9092 2136 }
b69d1cb6 2137
2138 return false;
ca3c9092 2139}
2140
6afd0544 2141/* Combine two conversions in a row, where the second conversion is at *GSI.
89c8f35a 2142 Return 1 if any change was made, 2 if cfg-cleanup needs to
 2143 run, and 0 otherwise. */
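/* For instance (illustrative, made-up names): for signed char c,
     s_1 = (short) c_2;  i_3 = (int) s_1;
   has its second statement rewritten to  i_3 = (int) c_2;  dropping
   the intermediate conversion. */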
6afd0544 2144
89c8f35a 2145static int
6afd0544 2146combine_conversions (gimple_stmt_iterator *gsi)
2147{
2148 gimple stmt = gsi_stmt (*gsi);
2149 gimple def_stmt;
2150 tree op0, lhs;
2151 enum tree_code code = gimple_assign_rhs_code (stmt);
2152
2153 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2154 || code == FLOAT_EXPR
2155 || code == FIX_TRUNC_EXPR);
2156
2157 lhs = gimple_assign_lhs (stmt);
2158 op0 = gimple_assign_rhs1 (stmt);
2159 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2160 {
2161 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
89c8f35a 2162 return 1;
6afd0544 2163 }
2164
2165 if (TREE_CODE (op0) != SSA_NAME)
89c8f35a 2166 return 0;
6afd0544 2167
2168 def_stmt = SSA_NAME_DEF_STMT (op0);
2169 if (!is_gimple_assign (def_stmt))
89c8f35a 2170 return 0;
6afd0544 2171
2172 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2173 {
2174 tree defop0 = gimple_assign_rhs1 (def_stmt);
2175 tree type = TREE_TYPE (lhs);
2176 tree inside_type = TREE_TYPE (defop0);
2177 tree inter_type = TREE_TYPE (op0);
2178 int inside_int = INTEGRAL_TYPE_P (inside_type);
2179 int inside_ptr = POINTER_TYPE_P (inside_type);
2180 int inside_float = FLOAT_TYPE_P (inside_type);
2181 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2182 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2183 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2184 int inter_int = INTEGRAL_TYPE_P (inter_type);
2185 int inter_ptr = POINTER_TYPE_P (inter_type);
2186 int inter_float = FLOAT_TYPE_P (inter_type);
2187 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2188 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2189 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2190 int final_int = INTEGRAL_TYPE_P (type);
2191 int final_ptr = POINTER_TYPE_P (type);
2192 int final_float = FLOAT_TYPE_P (type);
2193 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2194 unsigned int final_prec = TYPE_PRECISION (type);
2195 int final_unsignedp = TYPE_UNSIGNED (type);
2196
2197 /* In addition to the cases of two conversions in a row
2198 handled below, if we are converting something to its own
2199 type via an object of identical or wider precision, neither
2200 conversion is needed. */
2201 if (useless_type_conversion_p (type, inside_type)
2202 && (((inter_int || inter_ptr) && final_int)
2203 || (inter_float && final_float))
2204 && inter_prec >= final_prec)
2205 {
2206 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2207 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2208 update_stmt (stmt);
89c8f35a 2209 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2210 }
2211
2212 /* Likewise, if the intermediate and initial types are either both
2213 float or both integer, we don't need the middle conversion if the
2214 former is wider than the latter and doesn't change the signedness
2215 (for integers). Avoid this if the final type is a pointer since
2216 then we sometimes need the middle conversion. Likewise if the
2217 final type has a precision not equal to the size of its mode. */
2218 if (((inter_int && inside_int)
2219 || (inter_float && inside_float)
2220 || (inter_vec && inside_vec))
2221 && inter_prec >= inside_prec
2222 && (inter_float || inter_vec
2223 || inter_unsignedp == inside_unsignedp)
2224 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2225 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2226 && ! final_ptr
2227 && (! final_vec || inter_prec == inside_prec))
2228 {
2229 gimple_assign_set_rhs1 (stmt, defop0);
2230 update_stmt (stmt);
89c8f35a 2231 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2232 }
2233
2234 /* If we have a sign-extension of a zero-extended value, we can
2235 replace that by a single zero-extension. */
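      /* For instance (illustrative): for unsigned char c,
           s_1 = (short) c_2;  l_3 = (long) s_1;
         becomes  l_3 = (long) c_2;  */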
2236 if (inside_int && inter_int && final_int
2237 && inside_prec < inter_prec && inter_prec < final_prec
2238 && inside_unsignedp && !inter_unsignedp)
2239 {
2240 gimple_assign_set_rhs1 (stmt, defop0);
2241 update_stmt (stmt);
89c8f35a 2242 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2243 }
2244
2245 /* Two conversions in a row are not needed unless:
2246 - some conversion is floating-point (overstrict for now), or
2247 - some conversion is a vector (overstrict for now), or
2248 - the intermediate type is narrower than both initial and
2249 final, or
2250 - the intermediate type and innermost type differ in signedness,
2251 and the outermost type is wider than the intermediate, or
2252 - the initial type is a pointer type and the precisions of the
2253 intermediate and final types differ, or
2254 - the final type is a pointer type and the precisions of the
2255 initial and intermediate types differ. */
2256 if (! inside_float && ! inter_float && ! final_float
2257 && ! inside_vec && ! inter_vec && ! final_vec
2258 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2259 && ! (inside_int && inter_int
2260 && inter_unsignedp != inside_unsignedp
2261 && inter_prec < final_prec)
2262 && ((inter_unsignedp && inter_prec > inside_prec)
2263 == (final_unsignedp && final_prec > inter_prec))
2264 && ! (inside_ptr && inter_prec != final_prec)
2265 && ! (final_ptr && inside_prec != inter_prec)
2266 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2267 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2268 {
2269 gimple_assign_set_rhs1 (stmt, defop0);
2270 update_stmt (stmt);
89c8f35a 2271 return remove_prop_source_from_use (op0) ? 2 : 1;
6afd0544 2272 }
2273
2274 /* A truncation to an unsigned type should be canonicalized as
2275 bitwise and of a mask. */
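      /* For instance (illustrative): for int i,
           t_1 = (unsigned short) i_2;  r_3 = (int) t_1;
         is canonicalized to  r_3 = i_2 & 65535;  */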
2276 if (final_int && inter_int && inside_int
2277 && final_prec == inside_prec
2278 && final_prec > inter_prec
2279 && inter_unsignedp)
2280 {
2281 tree tem;
2282 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2283 defop0,
2284 double_int_to_tree
2285 (inside_type, double_int_mask (inter_prec)));
2286 if (!useless_type_conversion_p (type, inside_type))
2287 {
2288 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2289 GSI_SAME_STMT);
2290 gimple_assign_set_rhs1 (stmt, tem);
2291 }
2292 else
2293 gimple_assign_set_rhs_from_tree (gsi, tem);
2294 update_stmt (gsi_stmt (*gsi));
89c8f35a 2295 return 1;
6afd0544 2296 }
2297 }
2298
89c8f35a 2299 return 0;
6afd0544 2300}
2301
678b2f5b 2302/* Main entry point for the forward propagation and statement combine
2303 optimizer. */
4ee9c684 2304
2a1990e9 2305static unsigned int
678b2f5b 2306ssa_forward_propagate_and_combine (void)
4ee9c684 2307{
f5c8cff5 2308 basic_block bb;
c96420f8 2309 unsigned int todoflags = 0;
4ee9c684 2310
148aa112 2311 cfg_changed = false;
2312
f5c8cff5 2313 FOR_EACH_BB (bb)
2314 {
a7107e58 2315 gimple_stmt_iterator gsi, prev;
2316 bool prev_initialized;
291d763b 2317
678b2f5b 2318 /* Apply forward propagation to all stmts in the basic-block.
2319 Note we update GSI within the loop as necessary. */
75a70cf9 2320 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
291d763b 2321 {
75a70cf9 2322 gimple stmt = gsi_stmt (gsi);
678b2f5b 2323 tree lhs, rhs;
2324 enum tree_code code;
291d763b 2325
678b2f5b 2326 if (!is_gimple_assign (stmt))
291d763b 2327 {
678b2f5b 2328 gsi_next (&gsi);
2329 continue;
2330 }
3a938499 2331
678b2f5b 2332 lhs = gimple_assign_lhs (stmt);
2333 rhs = gimple_assign_rhs1 (stmt);
2334 code = gimple_assign_rhs_code (stmt);
2335 if (TREE_CODE (lhs) != SSA_NAME
2336 || has_zero_uses (lhs))
2337 {
2338 gsi_next (&gsi);
2339 continue;
2340 }
3a938499 2341
678b2f5b 2342 /* If this statement sets an SSA_NAME to an address,
2343 try to propagate the address into the uses of the SSA_NAME. */
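	  /* For instance (illustrative): given  p_1 = &a[4];  a
	     dereference such as  *p_1 = 0;  can typically be rewritten
	     to store into a[4] directly, after which the address
	     assignment becomes dead and is removed. */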
2344 if (code == ADDR_EXPR
2345 /* Handle pointer conversions on invariant addresses
2346 as well, as this is valid gimple. */
2347 || (CONVERT_EXPR_CODE_P (code)
2348 && TREE_CODE (rhs) == ADDR_EXPR
2349 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2350 {
2351 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2352 if ((!base
2353 || !DECL_P (base)
2354 || decl_address_invariant_p (base))
2355 && !stmt_references_abnormal_ssa_name (stmt)
2356 && forward_propagate_addr_expr (lhs, rhs))
1c4607fd 2357 {
678b2f5b 2358 release_defs (stmt);
2359 todoflags |= TODO_remove_unused_locals;
2360 gsi_remove (&gsi, true);
1c4607fd 2361 }
678b2f5b 2362 else
2363 gsi_next (&gsi);
2364 }
32cdcc42 2365 else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt))
678b2f5b 2366 {
2367 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
2368 /* ??? Better adjust the interface to that function
2369 instead of building new trees here. */
2370 && forward_propagate_addr_expr
2371 (lhs,
2372 build1 (ADDR_EXPR,
2373 TREE_TYPE (rhs),
2374 fold_build2 (MEM_REF,
2375 TREE_TYPE (TREE_TYPE (rhs)),
2376 rhs,
2377 fold_convert
2378 (ptr_type_node,
2379 gimple_assign_rhs2 (stmt))))))
ca3c9092 2380 {
678b2f5b 2381 release_defs (stmt);
2382 todoflags |= TODO_remove_unused_locals;
2383 gsi_remove (&gsi, true);
ca3c9092 2384 }
678b2f5b 2385 else if (is_gimple_min_invariant (rhs))
6afd0544 2386 {
678b2f5b 2387 /* Make sure to fold &a[0] + off_1 here. */
2388 fold_stmt_inplace (stmt);
2389 update_stmt (stmt);
2390 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6afd0544 2391 gsi_next (&gsi);
2392 }
291d763b 2393 else
75a70cf9 2394 gsi_next (&gsi);
291d763b 2395 }
678b2f5b 2396 else if (TREE_CODE_CLASS (code) == tcc_comparison)
b5860aba 2397 {
678b2f5b 2398 forward_propagate_comparison (stmt);
75a70cf9 2399 gsi_next (&gsi);
b5860aba 2400 }
291d763b 2401 else
75a70cf9 2402 gsi_next (&gsi);
291d763b 2403 }
678b2f5b 2404
2405 /* Combine stmts with the stmts defining their operands.
2406 Note we update GSI within the loop as necessary. */
a7107e58 2407 prev_initialized = false;
2408 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
678b2f5b 2409 {
2410 gimple stmt = gsi_stmt (gsi);
2411 bool changed = false;
2412
2413 switch (gimple_code (stmt))
2414 {
2415 case GIMPLE_ASSIGN:
2416 {
2417 tree rhs1 = gimple_assign_rhs1 (stmt);
2418 enum tree_code code = gimple_assign_rhs_code (stmt);
2419
2420 if ((code == BIT_NOT_EXPR
2421 || code == NEGATE_EXPR)
2422 && TREE_CODE (rhs1) == SSA_NAME)
2423 changed = simplify_not_neg_expr (&gsi);
2424 else if (code == COND_EXPR)
2425 {
2426 /* In this case the entire COND_EXPR is in rhs1. */
2427 int did_something;
2428 fold_defer_overflow_warnings ();
2429 did_something = forward_propagate_into_cond (&gsi);
2430 stmt = gsi_stmt (gsi);
2431 if (did_something == 2)
2432 cfg_changed = true;
2433 fold_undefer_overflow_warnings
2434 (!TREE_NO_WARNING (rhs1) && did_something, stmt,
2435 WARN_STRICT_OVERFLOW_CONDITIONAL);
2436 changed = did_something != 0;
2437 }
2438 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2439 {
2440 bool no_warning = gimple_no_warning_p (stmt);
6f9714b3 2441 int did_something;
678b2f5b 2442 fold_defer_overflow_warnings ();
6f9714b3 2443 did_something = forward_propagate_into_comparison (&gsi);
2444 if (did_something == 2)
2445 cfg_changed = true;
678b2f5b 2446 fold_undefer_overflow_warnings
2447 (!no_warning && changed,
2448 stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
6f9714b3 2449 changed = did_something != 0;
678b2f5b 2450 }
2451 else if (code == BIT_AND_EXPR
2452 || code == BIT_IOR_EXPR
2453 || code == BIT_XOR_EXPR)
2454 changed = simplify_bitwise_binary (&gsi);
2455 else if (code == PLUS_EXPR
2456 || code == MINUS_EXPR)
2457 changed = associate_plusminus (stmt);
2458 else if (CONVERT_EXPR_CODE_P (code)
2459 || code == FLOAT_EXPR
2460 || code == FIX_TRUNC_EXPR)
89c8f35a 2461 {
2462 int did_something = combine_conversions (&gsi);
2463 if (did_something == 2)
2464 cfg_changed = true;
2465 changed = did_something != 0;
2466 }
678b2f5b 2467 break;
2468 }
2469
2470 case GIMPLE_SWITCH:
2471 changed = simplify_gimple_switch (stmt);
2472 break;
2473
2474 case GIMPLE_COND:
2475 {
2476 int did_something;
2477 fold_defer_overflow_warnings ();
2478 did_something = forward_propagate_into_gimple_cond (stmt);
2479 if (did_something == 2)
2480 cfg_changed = true;
2481 fold_undefer_overflow_warnings
2482 (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2483 changed = did_something != 0;
2484 break;
2485 }
2486
2487 case GIMPLE_CALL:
2488 {
2489 tree callee = gimple_call_fndecl (stmt);
2490 if (callee != NULL_TREE
2491 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2492 changed = simplify_builtin_call (&gsi, callee);
2493 break;
2494 }
2495
2496 default:;
2497 }
2498
a7107e58 2499 if (changed)
2500 {
2501 /* If the stmt changed then re-visit it and the statements
2502 inserted before it. */
2503 if (!prev_initialized)
2504 gsi = gsi_start_bb (bb);
2505 else
2506 {
2507 gsi = prev;
2508 gsi_next (&gsi);
2509 }
2510 }
2511 else
2512 {
2513 prev = gsi;
2514 prev_initialized = true;
2515 gsi_next (&gsi);
2516 }
678b2f5b 2517 }
f5c8cff5 2518 }
148aa112 2519
2520 if (cfg_changed)
6fa78c7b 2521 todoflags |= TODO_cleanup_cfg;
678b2f5b 2522
c96420f8 2523 return todoflags;
4ee9c684 2524}
2525
2526
2527static bool
2528gate_forwprop (void)
2529{
408c3c77 2530 return flag_tree_forwprop;
4ee9c684 2531}
2532
48e1416a 2533struct gimple_opt_pass pass_forwprop =
20099e35 2534{
2535 {
2536 GIMPLE_PASS,
4ee9c684 2537 "forwprop", /* name */
2538 gate_forwprop, /* gate */
678b2f5b 2539 ssa_forward_propagate_and_combine, /* execute */
4ee9c684 2540 NULL, /* sub */
2541 NULL, /* next */
2542 0, /* static_pass_number */
2543 TV_TREE_FORWPROP, /* tv_id */
49290934 2544 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 2545 0, /* properties_provided */
b6246c40 2546 0, /* properties_destroyed */
4ee9c684 2547 0, /* todo_flags_start */
771e2890 2548 TODO_ggc_collect
de6ed584 2549 | TODO_update_ssa
20099e35 2550 | TODO_verify_ssa /* todo_flags_finish */
2551 }
4ee9c684 2552};