/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

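/* Illustrative sketch (not part of the original sources): with the
   constants c1 == 3 and c2 == 10, the arithmetic case above rewrites

     bb0:
       x_1 = a_2 + 3;
       if (x_1 == 10) goto bb1; else goto bb2;

   into

     bb0:
       if (a_2 == 7) goto bb1; else goto bb2;

   The SSA names x_1 and a_2 are made up for the example; the folding
   itself is done below by combine_cond_expr_cond via fold_binary_loc.  */
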
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such statement.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the ssa name that represents the use
   is stored in it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such statement.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}

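/* Illustrative sketch (not part of the original sources): for a chain
   of trivial copies such as

     a_1 = x_2 < y_3;
     b_4 = a_1;
     c_5 = b_4;
     if (c_5 != 0) ...

   get_prop_dest_stmt (a_1, ...) skips the copies and returns the
   GIMPLE_COND use, while get_prop_source_stmt (c_5, ...) walks the
   copies backwards and returns the comparison that defines a_1.
   The SSA names are hypothetical.  */
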
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT as a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}

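/* Illustrative sketch (not part of the original sources): the edge
   swapping above turns

     if (b_1 == 0) goto bb1; else goto bb2;

   with a 1-bit/_Bool b_1 into the equivalent of

     if (b_1 != 0) goto bb2; else goto bb1;

   by flipping EDGE_TRUE_VALUE/EDGE_FALSE_VALUE on the two successor
   edges instead of touching the destinations.  The names are made up.  */
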
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return 0;
}

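/* Illustrative sketch (not part of the original sources): for

     c_1 = a_2 < b_3;
     x_4 = c_1 ? s_5 : t_6;

   the code above replaces the condition, giving

     x_4 = a_2 < b_3 ? s_5 : t_6;

   and for c_1 = ~d_7 with 1-bit d_7 (or c_1 = d_7 ^ 1) it instead uses
   d_7 as the condition and swaps the two arms.  Names are hypothetical.  */
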
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}

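/* Illustrative sketch (not part of the original sources): when an arm
   is itself a COND_EXPR (or VEC_COND_EXPR) on the same condition, as in

     v_1 = c_2 ? a_3 : b_4;
     x_5 = c_2 ? v_1 : d_6;

   the first arm of x_5 is replaced by a_3; if both arms then become
   equal, the whole statement degenerates to a plain copy.  Names are
   hypothetical.  */
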
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
     recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may still have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from here on, we can only propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

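/* Illustrative sketch (not part of the original sources): for a
   4-byte element type, e.g.

     int x[10];
     ptr_1 = &x[0];
     ptr2_2 = ptr_1 + 8;

   the INTEGER_CST case above folds the constant offset back into the
   address, yielding the equivalent of ptr2_2 = &x[2] (an ADDR_EXPR of
   a MEM_REF of x at byte offset 8).  Names are made up.  */
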
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}


/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}

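/* Illustrative sketch (not part of the original sources): for

     c_1 = a_2 < b_3;
     d_4 = ~c_1;   (or d_4 = c_1 ^ 1)

   with a 1-bit c_1, the negation is rewritten to use the inverted
   comparison, d_4 = a_2 >= b_3, provided invert_tree_comparison
   succeeds (it gives up for floating-point operands when NaNs are
   honored).  Names are hypothetical.  */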

/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}

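/* Illustrative sketch (not part of the original sources): with
   unsigned char as the narrower type,

     t_1 = x_2 & 255;
     y_3 = (unsigned char) t_1;

   becomes y_3 = (unsigned char) x_2, because 255 is exactly the
   low-bits mask of the 8-bit result type; the now dead BIT_AND_EXPR
   is removed on the spot.  Names are made up.  */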

/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}

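/* Illustrative sketch (not part of the original sources): for

     x_1 = ~y_2;
     z_3 = ~x_1;

   the second statement becomes z_3 = y_2 (and likewise for double
   negation with NEGATE_EXPR), which copy propagation can then clean
   up.  Names are hypothetical.  */
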
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}

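/* Illustrative sketch (not part of the original sources): given

     unsigned char c_1 = ...;
     i_2 = (int) c_1;
     switch (i_2) { case 5: ...  case 300: ... }

   the widening cast is dropped so the switch runs on c_1 directly,
   and simplify_gimple_switch_label_vec discards the case 300 label,
   which is out of range for unsigned char.  Names are made up.  */
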
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

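/* Illustrative sketch (not part of the original sources): for

     char buf[32];
     p1_1 = &buf[4];
     p2_2 = p1_1 + 12;

   constant_pointer_difference (p1_1, p2_2) finds a common point in
   the two chains (p1_1 itself, or the underlying &buf) and returns
   the constant byte difference 12.  Names are hypothetical.  */
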
1491/* *GSI_P is a GIMPLE_CALL to a builtin function.
1492 Optimize
1493 memcpy (p, "abcd", 4);
1494 memset (p + 4, ' ', 3);
1495 into
1496 memcpy (p, "abcd ", 7);
1497 call if the latter can be stored by pieces during expansion. */
1498
1499static bool
1500simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1501{
1502 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1503 tree vuse = gimple_vuse (stmt2);
1504 if (vuse == NULL)
1505 return false;
1506 stmt1 = SSA_NAME_DEF_STMT (vuse);
1507
1508 switch (DECL_FUNCTION_CODE (callee2))
1509 {
1510 case BUILT_IN_MEMSET:
1511 if (gimple_call_num_args (stmt2) != 3
1512 || gimple_call_lhs (stmt2)
1513 || CHAR_BIT != 8
1514 || BITS_PER_UNIT != 8)
1515 break;
1516 else
1517 {
1518 tree callee1;
1519 tree ptr1, src1, str1, off1, len1, lhs1;
1520 tree ptr2 = gimple_call_arg (stmt2, 0);
1521 tree val2 = gimple_call_arg (stmt2, 1);
1522 tree len2 = gimple_call_arg (stmt2, 2);
1523 tree diff, vdef, new_str_cst;
1524 gimple use_stmt;
1525 unsigned int ptr1_align;
1526 unsigned HOST_WIDE_INT src_len;
1527 char *src_buf;
1528 use_operand_p use_p;
1529
9541ffee 1530 if (!tree_fits_shwi_p (val2)
f4684242
JJ
1531 || !host_integerp (len2, 1))
1532 break;
1533 if (is_gimple_call (stmt1))
1534 {
1535 /* If first stmt is a call, it needs to be memcpy
1536 or mempcpy, with string literal as second argument and
1537 constant length. */
1538 callee1 = gimple_call_fndecl (stmt1);
1539 if (callee1 == NULL_TREE
1540 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1541 || gimple_call_num_args (stmt1) != 3)
1542 break;
1543 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1544 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1545 break;
1546 ptr1 = gimple_call_arg (stmt1, 0);
1547 src1 = gimple_call_arg (stmt1, 1);
1548 len1 = gimple_call_arg (stmt1, 2);
1549 lhs1 = gimple_call_lhs (stmt1);
1550 if (!host_integerp (len1, 1))
1551 break;
1552 str1 = string_constant (src1, &off1);
1553 if (str1 == NULL_TREE)
1554 break;
1555 if (!host_integerp (off1, 1)
1556 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1557 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1558 - tree_low_cst (off1, 1)) > 0
1559 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1560 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1561 != TYPE_MODE (char_type_node))
1562 break;
1563 }
1564 else if (gimple_assign_single_p (stmt1))
1565 {
1566 /* Otherwise look for length 1 memcpy optimized into
1567 assignment. */
1568 ptr1 = gimple_assign_lhs (stmt1);
1569 src1 = gimple_assign_rhs1 (stmt1);
1570 if (TREE_CODE (ptr1) != MEM_REF
1571 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
9541ffee 1572 || !tree_fits_shwi_p (src1))
f4684242
JJ
1573 break;
1574 ptr1 = build_fold_addr_expr (ptr1);
1575 callee1 = NULL_TREE;
1576 len1 = size_one_node;
1577 lhs1 = NULL_TREE;
1578 off1 = size_zero_node;
1579 str1 = NULL_TREE;
1580 }
1581 else
1582 break;
1583
1584 diff = constant_pointer_difference (ptr1, ptr2);
1585 if (diff == NULL && lhs1 != NULL)
1586 {
1587 diff = constant_pointer_difference (lhs1, ptr2);
1588 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1589 && diff != NULL)
1590 diff = size_binop (PLUS_EXPR, diff,
1591 fold_convert (sizetype, len1));
1592 }
1593 /* If the difference between the second and first destination pointers
1594 is not constant, or is bigger than the memcpy length, bail out. */
1595 if (diff == NULL
1596 || !host_integerp (diff, 1)
1597 || tree_int_cst_lt (len1, diff))
1598 break;
1599
1600 /* Use the maximum of the difference plus the memset length and the
1601 memcpy length as the new memcpy length; if it is too big, bail out. */
1602 src_len = tree_low_cst (diff, 1);
1603 src_len += tree_low_cst (len2, 1);
1604 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1605 src_len = tree_low_cst (len1, 1);
1606 if (src_len > 1024)
1607 break;
1608
1609 /* If the mempcpy value is used elsewhere, bail out, as mempcpy
1610 with a bigger length would return a different result. */
1611 if (lhs1 != NULL_TREE
1612 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1613 && (TREE_CODE (lhs1) != SSA_NAME
1614 || !single_imm_use (lhs1, &use_p, &use_stmt)
1615 || use_stmt != stmt2))
1616 break;
1617
1618 /* If anything reads memory in between memcpy and memset
1619 call, the modified memcpy call might change it. */
1620 vdef = gimple_vdef (stmt1);
1621 if (vdef != NULL
1622 && (!single_imm_use (vdef, &use_p, &use_stmt)
1623 || use_stmt != stmt2))
1624 break;
1625
1626 ptr1_align = get_pointer_alignment (ptr1);
1627 /* Construct the new source string literal. */
1628 src_buf = XALLOCAVEC (char, src_len + 1);
1629 if (callee1)
1630 memcpy (src_buf,
1631 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1632 tree_low_cst (len1, 1));
1633 else
1634 src_buf[0] = tree_low_cst (src1, 0);
1635 memset (src_buf + tree_low_cst (diff, 1),
1636 tree_low_cst (val2, 0), tree_low_cst (len2, 1));
1637 src_buf[src_len] = '\0';
1638 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1639 handle embedded '\0's. */
1640 if (strlen (src_buf) != src_len)
1641 break;
1642 rtl_profile_for_bb (gimple_bb (stmt2));
1643 /* If the new memcpy wouldn't be emitted by storing the literal
1644 by pieces, this optimization might enlarge .rodata too much,
1645 as commonly used string literals couldn't be shared any
1646 longer. */
1647 if (!can_store_by_pieces (src_len,
1648 builtin_strncpy_read_str,
1649 src_buf, ptr1_align, false))
1650 break;
1651
1652 new_str_cst = build_string_literal (src_len, src_buf);
1653 if (callee1)
1654 {
1655 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1656 memset call. */
1657 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1658 gimple_call_set_lhs (stmt1, NULL_TREE);
1659 gimple_call_set_arg (stmt1, 1, new_str_cst);
1660 gimple_call_set_arg (stmt1, 2,
1661 build_int_cst (TREE_TYPE (len1), src_len));
1662 update_stmt (stmt1);
1663 unlink_stmt_vdef (stmt2);
1664 gsi_remove (gsi_p, true);
1665 release_defs (stmt2);
1666 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1667 release_ssa_name (lhs1);
1668 return true;
1669 }
1670 else
1671 {
1672 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1673 assignment, remove STMT1 and change memset call into
1674 memcpy call. */
1675 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1676
1677 if (!is_gimple_val (ptr1))
1678 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1679 true, GSI_SAME_STMT);
1680 gimple_call_set_fndecl (stmt2,
1681 builtin_decl_explicit (BUILT_IN_MEMCPY));
1682 gimple_call_set_arg (stmt2, 0, ptr1);
1683 gimple_call_set_arg (stmt2, 1, new_str_cst);
1684 gimple_call_set_arg (stmt2, 2,
1685 build_int_cst (TREE_TYPE (len2), src_len));
1686 unlink_stmt_vdef (stmt1);
1687 gsi_remove (&gsi, true);
1688 release_defs (stmt1);
1689 update_stmt (stmt2);
1690 return false;
1691 }
1692 }
1693 break;
1694 default:
1695 break;
1696 }
1697 return false;
1698}
1699
1700/* Check whether the SSA name NAME has a type of one-bit precision,
1701 or is a known truth-valued expression. */
1702static bool
1703truth_valued_ssa_name (tree name)
1704{
1705 gimple def;
1706 tree type = TREE_TYPE (name);
1707
1708 if (!INTEGRAL_TYPE_P (type))
1709 return false;
1710 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1711 necessarily one and so ~X is not equal to !X. */
1712 if (TYPE_PRECISION (type) == 1)
1713 return true;
1714 def = SSA_NAME_DEF_STMT (name);
1715 if (is_gimple_assign (def))
1716 return truth_value_p (gimple_assign_rhs_code (def));
1717 return false;
1718}
1719
1720/* Helper routine for the simplify_bitwise_binary_1 function.
1721 Return for the SSA name NAME the expression X if it meets the
1722 condition NAME = !X. Otherwise return NULL_TREE.
1723 Detected patterns for NAME = !X are:
1724 !X and X == 0 for X with integral type.
1725 X ^ 1, X != 1, or ~X for X with integral type of precision one. */
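/* For illustration (GIMPLE-style names, not from the original sources):
   given  tem_2 = x_1 == 0;  a lookup of tem_2 returns x_1, and given
   y_3 = b_4 ^ 1;  with a one-bit b_4, a lookup of y_3 returns b_4.  */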
1726static tree
1727lookup_logical_inverted_value (tree name)
1728{
1729 tree op1, op2;
1730 enum tree_code code;
1731 gimple def;
1732
1733 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1734 return. */
1735 if (TREE_CODE (name) != SSA_NAME
1736 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1737 return NULL_TREE;
1738 def = SSA_NAME_DEF_STMT (name);
1739 if (!is_gimple_assign (def))
1740 return NULL_TREE;
1741
1742 code = gimple_assign_rhs_code (def);
1743 op1 = gimple_assign_rhs1 (def);
1744 op2 = NULL_TREE;
1745
1746 /* For an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR operation get the second
1747 operand. If CODE is none of the handled codes, NULL_TREE is returned. */
1748 if (code == EQ_EXPR || code == NE_EXPR
1749 || code == BIT_XOR_EXPR)
1750 op2 = gimple_assign_rhs2 (def);
1751
1752 switch (code)
1753 {
1754 case BIT_NOT_EXPR:
1755 if (truth_valued_ssa_name (name))
1756 return op1;
1757 break;
1758 case EQ_EXPR:
1759 /* Check if we have X == 0 and X has an integral type. */
1760 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1761 break;
1762 if (integer_zerop (op2))
1763 return op1;
1764 break;
1765 case NE_EXPR:
1766 /* Check if we have X != 1 and X is truth-valued. */
1767 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1768 break;
1769 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1770 return op1;
1771 break;
1772 case BIT_XOR_EXPR:
1773 /* Check if we have X ^ 1 and X is truth valued. */
1774 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1775 return op1;
1776 break;
1777 default:
1778 break;
1779 }
1780
1781 return NULL_TREE;
1782}
1783
1784/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1785 operations CODE, if one operand has the logically inverted
1786 value of the other. */
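/* For illustration, with a one-bit operand b_1 and t_2 = ~b_1:
     b_1 & t_2  folds to 0,
     b_1 | t_2  and  b_1 ^ t_2  fold to 1,
   the latter two only because b_1 is truth-valued.  */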
1787static tree
1788simplify_bitwise_binary_1 (enum tree_code code, tree type,
1789 tree arg1, tree arg2)
1790{
1791 tree anot;
1792
1793 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1794 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1795 && code != BIT_XOR_EXPR)
1796 return NULL_TREE;
1797
1798 /* First check if operands ARG1 and ARG2 are equal. If so
1799 return NULL_TREE as this optimization is handled by fold_stmt. */
1800 if (arg1 == arg2)
1801 return NULL_TREE;
1802 /* See if we have in arguments logical-not patterns. */
1803 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1804 || anot != arg2)
1805 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1806 || anot != arg1))
1807 return NULL_TREE;
1808
1809 /* X & !X -> 0. */
1810 if (code == BIT_AND_EXPR)
1811 return fold_convert (type, integer_zero_node);
1812 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1813 if (truth_valued_ssa_name (anot))
1814 return fold_convert (type, integer_one_node);
1815
1816 /* ??? Otherwise the result is (X != 0 ? X : 1); not handled. */
1817 return NULL_TREE;
1818}
1819
1820/* Given an SSA name in NAME, see if it was defined by an assignment; if
1821 so, set CODE to the rhs code and ARG1 and ARG2 to the first and second
1822 operands on the rhs. */
1823
1824static inline void
1825defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1826{
1827 gimple def;
1828 enum tree_code code1;
1829 tree arg11;
1830 tree arg21;
1831 tree arg31;
1832 enum gimple_rhs_class grhs_class;
1833
1834 code1 = TREE_CODE (name);
1835 arg11 = name;
1836 arg21 = NULL_TREE;
1837 grhs_class = get_gimple_rhs_class (code1);
1838
1839 if (code1 == SSA_NAME)
1840 {
1841 def = SSA_NAME_DEF_STMT (name);
1842
1843 if (def && is_gimple_assign (def)
1844 && can_propagate_from (def))
1845 {
1846 code1 = gimple_assign_rhs_code (def);
1847 arg11 = gimple_assign_rhs1 (def);
1848 arg21 = gimple_assign_rhs2 (def);
1849 arg31 = gimple_assign_rhs3 (def);
1850 }
1851 }
1852 else if (grhs_class == GIMPLE_TERNARY_RHS
1853 || grhs_class == GIMPLE_BINARY_RHS
1854 || grhs_class == GIMPLE_UNARY_RHS
1855 || grhs_class == GIMPLE_SINGLE_RHS)
1856 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1857
1858 *code = code1;
1859 *arg1 = arg11;
1860 if (arg2)
1861 *arg2 = arg21;
1862 /* Ignore arg3 currently. */
1863}
1864
1865/* Return true if a conversion of an operand from type FROM to type TO
1866 should be applied after performing the operation instead. */
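/* For illustration: with  unsigned char c;  the expression
   (int) c & 0xf0  is better computed as  (int) (c & 0xf0),
   i.e. the widening conversion is hoisted past the bitwise AND
   so the operation itself is done in the narrower type.  */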
1867
1868static bool
1869hoist_conversion_for_bitop_p (tree to, tree from)
1870{
1871 /* That's a good idea if the conversion widens the operand, thus
1872 after hoisting the conversion the operation will be narrower. */
1873 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1874 return true;
1875
1876 /* It's also a good idea if the conversion is to a non-integer mode. */
1877 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1878 return true;
1879
1880 /* Or if the precision of TO is not the same as the precision
1881 of its mode. */
1882 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1883 return true;
1884
1885 return false;
1886}
1887
1888/* GSI points to a statement of the form
1889
1890 result = OP0 CODE OP1
1891
1892 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1893 BIT_AND_EXPR or BIT_IOR_EXPR.
1894
1895 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1896 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1897 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1898
1899 If a simplification is made, return TRUE, else return FALSE. */
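/* For illustration, with one-bit unsigned operands x_2 and y_4:
     t_1 = ~x_2;
     r_3 = t_1 & y_4;
   feeding  if (r_3 != 0)  is rewritten as  r_3 = x_2 < y_4;
   since !x && y is equivalent to x < y on 0/1 values.  */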
1900static bool
1901simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1902 enum tree_code code,
1903 tree op0, tree op1)
1904{
1905 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1906
1907 if (!is_gimple_assign (op0_def_stmt)
1908 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1909 return false;
1910
1911 tree x = gimple_assign_rhs1 (op0_def_stmt);
1912 if (TREE_CODE (x) == SSA_NAME
1913 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1914 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1915 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1916 {
1917 enum tree_code newcode;
1918
1919 gimple stmt = gsi_stmt (*gsi);
1920 gimple_assign_set_rhs1 (stmt, x);
1921 gimple_assign_set_rhs2 (stmt, op1);
1922 if (code == BIT_AND_EXPR)
1923 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1924 else
1925 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1926 gimple_assign_set_rhs_code (stmt, newcode);
1927 update_stmt (stmt);
1928 return true;
1929 }
1930 return false;
1931
1932}
1933
1934/* Simplify bitwise binary operations.
1935 Return true if a transformation was applied, otherwise return false. */
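/* A few of the cases handled below, for illustration:
     ((int) a) & ((int) b)  ->  (int) (a & b)   when a and b have the
                                                same narrower type
     (x | 0xf0) & 0x0f      ->  x & 0x0f
     x ^ ~0                 ->  ~x
     (y | x) & x            ->  x  */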
1936
1937static bool
1938simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1939 {
1940 gimple stmt = gsi_stmt (*gsi);
1941 tree arg1 = gimple_assign_rhs1 (stmt);
1942 tree arg2 = gimple_assign_rhs2 (stmt);
1943 enum tree_code code = gimple_assign_rhs_code (stmt);
1944 tree res;
1945 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1946 enum tree_code def1_code, def2_code;
1947
1948 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1949 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1950
1951 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1952 when profitable. */
1953 if (TREE_CODE (arg2) == INTEGER_CST
1954 && CONVERT_EXPR_CODE_P (def1_code)
1955 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1956 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1957 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1958 {
1959 gimple newop;
1960 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1961 newop =
1962 gimple_build_assign_with_ops (code, tem, def1_arg1,
1963 fold_convert_loc (gimple_location (stmt),
1964 TREE_TYPE (def1_arg1),
1965 arg2));
1966 gimple_set_location (newop, gimple_location (stmt));
1967 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1968 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1969 tem, NULL_TREE, NULL_TREE);
1970 update_stmt (gsi_stmt (*gsi));
1971 return true;
1972 }
1973
1974 /* For bitwise binary operations apply operand conversions to the
1975 binary operation result instead of to the operands. This allows
1976 to combine successive conversions and bitwise binary operations. */
1977 if (CONVERT_EXPR_CODE_P (def1_code)
1978 && CONVERT_EXPR_CODE_P (def2_code)
1979 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1980 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1981 {
1982 gimple newop;
1983 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1984 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1985 gimple_set_location (newop, gimple_location (stmt));
1986 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1987 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1988 tem, NULL_TREE, NULL_TREE);
1989 update_stmt (gsi_stmt (*gsi));
1990 return true;
1991 }
1992
1993
1994 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1995 if (def1_code == def2_code
1996 && def1_code == BIT_AND_EXPR
1997 && operand_equal_for_phi_arg_p (def1_arg2,
1998 def2_arg2))
1999 {
2000 tree b = def1_arg2;
2001 tree a = def1_arg1;
2002 tree c = def2_arg1;
2003 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2004 /* If A OP0 C (this usually means C is the same as A) is 0
2005 then fold it down correctly. */
2006 if (integer_zerop (inner))
2007 {
2008 gimple_assign_set_rhs_from_tree (gsi, inner);
2009 update_stmt (stmt);
2010 return true;
2011 }
2012 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
2013 then fold it down correctly. */
2014 else if (TREE_CODE (inner) == SSA_NAME)
2015 {
2016 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2017 inner, b);
2018 gimple_assign_set_rhs_from_tree (gsi, outer);
2019 update_stmt (stmt);
2020 return true;
2021 }
2022 else
2023 {
2024 gimple newop;
2025 tree tem;
2026 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2027 newop = gimple_build_assign_with_ops (code, tem, a, c);
2028 gimple_set_location (newop, gimple_location (stmt));
2029 /* Make sure to re-process the new stmt as it's walking upwards. */
2030 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2031 gimple_assign_set_rhs1 (stmt, tem);
2032 gimple_assign_set_rhs2 (stmt, b);
2033 gimple_assign_set_rhs_code (stmt, def1_code);
2034 update_stmt (stmt);
2035 return true;
2036 }
2037 }
2038
2039 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2040 if (code == BIT_AND_EXPR
2041 && def1_code == BIT_IOR_EXPR
2042 && CONSTANT_CLASS_P (arg2)
2043 && CONSTANT_CLASS_P (def1_arg2))
2044 {
2045 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2046 arg2, def1_arg2);
2047 tree tem;
2048 gimple newop;
2049 if (integer_zerop (cst))
2050 {
2051 gimple_assign_set_rhs1 (stmt, def1_arg1);
2052 update_stmt (stmt);
2053 return true;
2054 }
2055 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2056 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2057 tem, def1_arg1, arg2);
2058 gimple_set_location (newop, gimple_location (stmt));
2059 /* Make sure to re-process the new stmt as it's walking upwards. */
2060 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2061 gimple_assign_set_rhs1 (stmt, tem);
2062 gimple_assign_set_rhs2 (stmt, cst);
2063 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2064 update_stmt (stmt);
2065 return true;
2066 }
2067
2068 /* Combine successive equal operations with constants. */
2069 if ((code == BIT_AND_EXPR
2070 || code == BIT_IOR_EXPR
2071 || code == BIT_XOR_EXPR)
2072 && def1_code == code
2073 && CONSTANT_CLASS_P (arg2)
2074 && CONSTANT_CLASS_P (def1_arg2))
2075 {
2076 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2077 arg2, def1_arg2);
2078 gimple_assign_set_rhs1 (stmt, def1_arg1);
2079 gimple_assign_set_rhs2 (stmt, cst);
2080 update_stmt (stmt);
2081 return true;
2082 }
2083
2084 /* Canonicalize X ^ ~0 to ~X. */
2085 if (code == BIT_XOR_EXPR
2086 && integer_all_onesp (arg2))
2087 {
2088 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2089 gcc_assert (gsi_stmt (*gsi) == stmt);
2090 update_stmt (stmt);
2091 return true;
2092 }
2093
2094 /* Try simple folding for X op !X, and X op X. */
2095 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2096 if (res != NULL_TREE)
2097 {
2098 gimple_assign_set_rhs_from_tree (gsi, res);
2099 update_stmt (gsi_stmt (*gsi));
2100 return true;
2101 }
2102
2103 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2104 {
2105 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2106 if (def1_code == ocode)
2107 {
2108 tree x = arg2;
2109 enum tree_code coden;
2110 tree a1, a2;
2111 /* ( X | Y) & X -> X */
2112 /* ( X & Y) | X -> X */
2113 if (x == def1_arg1
2114 || x == def1_arg2)
2115 {
2116 gimple_assign_set_rhs_from_tree (gsi, x);
2117 update_stmt (gsi_stmt (*gsi));
2118 return true;
2119 }
2120
2121 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2122 /* (~X | Y) & X -> X & Y */
2123 /* (~X & Y) | X -> X | Y */
2124 if (coden == BIT_NOT_EXPR && a1 == x)
2125 {
2126 gimple_assign_set_rhs_with_ops (gsi, code,
2127 x, def1_arg2);
2128 gcc_assert (gsi_stmt (*gsi) == stmt);
2129 update_stmt (stmt);
2130 return true;
2131 }
2132 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2133 /* (Y | ~X) & X -> X & Y */
2134 /* (Y & ~X) | X -> X | Y */
2135 if (coden == BIT_NOT_EXPR && a1 == x)
2136 {
2137 gimple_assign_set_rhs_with_ops (gsi, code,
2138 x, def1_arg1);
2139 gcc_assert (gsi_stmt (*gsi) == stmt);
2140 update_stmt (stmt);
2141 return true;
2142 }
2143 }
2144 if (def2_code == ocode)
2145 {
2146 enum tree_code coden;
2147 tree a1;
2148 tree x = arg1;
2149 /* X & ( X | Y) -> X */
2150 /* X | ( X & Y) -> X */
2151 if (x == def2_arg1
2152 || x == def2_arg2)
2153 {
2154 gimple_assign_set_rhs_from_tree (gsi, x);
2155 update_stmt (gsi_stmt (*gsi));
2156 return true;
2157 }
2158 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2159 /* X & (~X | Y) -> X & Y */
2160 /* X | (~X & Y) -> X | Y */
2161 if (coden == BIT_NOT_EXPR && a1 == x)
2162 {
2163 gimple_assign_set_rhs_with_ops (gsi, code,
2164 x, def2_arg2);
2165 gcc_assert (gsi_stmt (*gsi) == stmt);
2166 update_stmt (stmt);
2167 return true;
2168 }
2169 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2170 /* X & (Y | ~X) -> X & Y */
2171 /* X | (Y & ~X) -> X | Y */
2172 if (coden == BIT_NOT_EXPR && a1 == x)
2173 {
2174 gimple_assign_set_rhs_with_ops (gsi, code,
2175 x, def2_arg1);
2176 gcc_assert (gsi_stmt (*gsi) == stmt);
2177 update_stmt (stmt);
2178 return true;
2179 }
2180 }
2181
2182 /* If arg1 and arg2 are booleans (or any single bit type)
2183 then try to simplify:
2184
2185 (~X & Y) -> X < Y
2186 (X & ~Y) -> Y < X
2187 (~X | Y) -> X <= Y
2188 (X | ~Y) -> Y <= X
2189
2190 But only do this if our result feeds into a comparison as
2191 this transformation is not always a win, particularly on
2192 targets with and-not instructions. */
2193 if (TREE_CODE (arg1) == SSA_NAME
2194 && TREE_CODE (arg2) == SSA_NAME
2195 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2196 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2197 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2198 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2199 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2200 {
2201 use_operand_p use_p;
2202 gimple use_stmt;
2203
2204 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2205 {
2206 if (gimple_code (use_stmt) == GIMPLE_COND
2207 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2208 && integer_zerop (gimple_cond_rhs (use_stmt))
2209 && gimple_cond_code (use_stmt) == NE_EXPR)
2210 {
2211 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2212 return true;
2213 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2214 return true;
2215 }
2216 }
2217 }
2218 }
10c224a9 2219 return false;
2220}
2221
2222
2223/* Recognize rotation patterns. Return true if a transformation
2224 applied, otherwise return false.
2225
2226 We are looking for X with unsigned type T with bitsize B, OP being
2227 +, | or ^, some type T2 wider than T and
2228 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2229 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2230 (X << Y) OP (X >> (B - Y))
2231 (X << (int) Y) OP (X >> (int) (B - Y))
2232 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2233 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2234 (X << Y) | (X >> ((-Y) & (B - 1)))
2235 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2236 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2237 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2238
2239 and transform these into:
2240 X r<< CNT1
2241 X r<< Y
2242
2243 Note, in the patterns with the T2 type, the type of the OP operands
2244 might even be a signed type, but it should have precision B. */
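/* For instance (illustrative only, assuming a 32-bit unsigned int),
   the common C idiom
     unsigned int
     rotl32 (unsigned int x, unsigned int n)
     {
       return (x << n) | (x >> ((-n) & 31));
     }
   matches the (-Y) & (B - 1) form above and is turned into
   x r<< n, which is well defined even for n == 0.  */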
2245
2246static bool
2247simplify_rotate (gimple_stmt_iterator *gsi)
2248{
2249 gimple stmt = gsi_stmt (*gsi);
2250 tree arg[2], rtype, rotcnt = NULL_TREE;
2251 tree def_arg1[2], def_arg2[2];
2252 enum tree_code def_code[2];
2253 tree lhs;
2254 int i;
2255 bool swapped_p = false;
2256 gimple g;
2257
2258 arg[0] = gimple_assign_rhs1 (stmt);
2259 arg[1] = gimple_assign_rhs2 (stmt);
2260 rtype = TREE_TYPE (arg[0]);
2261
2262 /* Only create rotates in complete modes. Other cases are not
2263 expanded properly. */
2264 if (!INTEGRAL_TYPE_P (rtype)
2265 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2266 return false;
2267
2268 for (i = 0; i < 2; i++)
2269 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2270
2271 /* Look through narrowing conversions. */
2272 if (CONVERT_EXPR_CODE_P (def_code[0])
2273 && CONVERT_EXPR_CODE_P (def_code[1])
2274 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2275 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2276 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2277 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2278 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2279 && has_single_use (arg[0])
2280 && has_single_use (arg[1]))
2281 {
2282 for (i = 0; i < 2; i++)
2283 {
2284 arg[i] = def_arg1[i];
2285 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2286 }
2287 }
2288
2289 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2290 for (i = 0; i < 2; i++)
2291 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2292 return false;
2293 else if (!has_single_use (arg[i]))
2294 return false;
2295 if (def_code[0] == def_code[1])
2296 return false;
2297
2298 /* If we've looked through narrowing conversions before, look through
2299 widening conversions from unsigned type with the same precision
2300 as rtype here. */
2301 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2302 for (i = 0; i < 2; i++)
2303 {
2304 tree tem;
2305 enum tree_code code;
2306 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2307 if (!CONVERT_EXPR_CODE_P (code)
2308 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2309 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2310 return false;
2311 def_arg1[i] = tem;
2312 }
2313 /* Both shifts have to use the same first operand. */
2314 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2315 return false;
2316 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2317 return false;
2318
2319 /* CNT1 + CNT2 == B case above. */
2320 if (host_integerp (def_arg2[0], 1)
2321 && host_integerp (def_arg2[1], 1)
2322 && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
2323 + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
2324 rotcnt = def_arg2[0];
2325 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2326 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2327 return false;
2328 else
2329 {
2330 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2331 enum tree_code cdef_code[2];
2332 /* Look through conversion of the shift count argument.
2333 The C/C++ FE casts any shift count argument to integer_type_node.
2334 The only problem might be if the shift count type's maximum value
2335 is equal to or smaller than the number of bits in rtype. */
2336 for (i = 0; i < 2; i++)
2337 {
2338 def_arg2_alt[i] = def_arg2[i];
2339 defcodefor_name (def_arg2[i], &cdef_code[i],
2340 &cdef_arg1[i], &cdef_arg2[i]);
2341 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2342 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2343 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2344 > floor_log2 (TYPE_PRECISION (rtype))
2345 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2346 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2347 {
2348 def_arg2_alt[i] = cdef_arg1[i];
2349 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2350 &cdef_arg1[i], &cdef_arg2[i]);
2351 }
2352 }
2353 for (i = 0; i < 2; i++)
2354 /* Check for one shift count being Y and the other B - Y,
2355 with optional casts. */
2356 if (cdef_code[i] == MINUS_EXPR
2357 && tree_fits_shwi_p (cdef_arg1[i])
2358 && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
2359 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2360 {
2361 tree tem;
2362 enum tree_code code;
2363
2364 if (cdef_arg2[i] == def_arg2[1 - i]
2365 || cdef_arg2[i] == def_arg2_alt[1 - i])
2366 {
2367 rotcnt = cdef_arg2[i];
2368 break;
2369 }
2370 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2371 if (CONVERT_EXPR_CODE_P (code)
2372 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2373 && TYPE_PRECISION (TREE_TYPE (tem))
2374 > floor_log2 (TYPE_PRECISION (rtype))
2375 && TYPE_PRECISION (TREE_TYPE (tem))
2376 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2377 && (tem == def_arg2[1 - i]
2378 || tem == def_arg2_alt[1 - i]))
2379 {
2380 rotcnt = tem;
2381 break;
2382 }
2383 }
2384 /* The above sequence isn't safe for Y being 0,
2385 because then one of the shifts triggers undefined behavior.
2386 This alternative is safe even for rotation count of 0.
2387 One shift count is Y and the other (-Y) & (B - 1). */
2388 else if (cdef_code[i] == BIT_AND_EXPR
2389 && tree_fits_shwi_p (cdef_arg2[i])
2390 && tree_low_cst (cdef_arg2[i], 0)
2391 == TYPE_PRECISION (rtype) - 1
2392 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2393 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2394 {
2395 tree tem;
2396 enum tree_code code;
2397
2398 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2399 if (CONVERT_EXPR_CODE_P (code)
2400 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2401 && TYPE_PRECISION (TREE_TYPE (tem))
2402 > floor_log2 (TYPE_PRECISION (rtype))
2403 && TYPE_PRECISION (TREE_TYPE (tem))
2404 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2405 defcodefor_name (tem, &code, &tem, NULL);
2406
2407 if (code == NEGATE_EXPR)
2408 {
2409 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2410 {
2411 rotcnt = tem;
2412 break;
2413 }
2414 defcodefor_name (tem, &code, &tem, NULL);
2415 if (CONVERT_EXPR_CODE_P (code)
2416 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2417 && TYPE_PRECISION (TREE_TYPE (tem))
2418 > floor_log2 (TYPE_PRECISION (rtype))
2419 && TYPE_PRECISION (TREE_TYPE (tem))
2420 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2421 && (tem == def_arg2[1 - i]
2422 || tem == def_arg2_alt[1 - i]))
2423 {
2424 rotcnt = tem;
2425 break;
2426 }
2427 }
2428 }
2429 if (rotcnt == NULL_TREE)
2430 return false;
2431 swapped_p = i != 1;
2432 }
2433
2434 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2435 TREE_TYPE (rotcnt)))
2436 {
2437 g = gimple_build_assign_with_ops (NOP_EXPR,
2438 make_ssa_name (TREE_TYPE (def_arg2[0]),
2439 NULL),
2440 rotcnt, NULL_TREE);
2441 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2442 rotcnt = gimple_assign_lhs (g);
2443 }
2444 lhs = gimple_assign_lhs (stmt);
2445 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2446 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2447 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2448 ? LROTATE_EXPR : RROTATE_EXPR,
2449 lhs, def_arg1[0], rotcnt);
2450 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2451 {
2452 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2453 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2454 lhs, NULL_TREE);
2455 }
2456 gsi_replace (gsi, g, false);
2457 return true;
2458}
2459
2460/* Perform re-associations of the plus or minus statement STMT that are
2461 always permitted. Returns true if the CFG was changed. */
2462
2463 static bool
2464 associate_plusminus (gimple_stmt_iterator *gsi)
2465 {
2466 gimple stmt = gsi_stmt (*gsi);
2467 tree rhs1 = gimple_assign_rhs1 (stmt);
2468 tree rhs2 = gimple_assign_rhs2 (stmt);
2469 enum tree_code code = gimple_assign_rhs_code (stmt);
2470 bool changed;
2471
2472 /* We can't reassociate at all for saturating types. */
2473 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
0fdb0d27 2474 return false;
2d698d3b
RG
2475
2476 /* First contract negates. */
2477 do
2478 {
2479 changed = false;
2480
2481 /* A +- (-B) -> A -+ B. */
2482 if (TREE_CODE (rhs2) == SSA_NAME)
2483 {
2484 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2485 if (is_gimple_assign (def_stmt)
931050d0
EB
2486 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2487 && can_propagate_from (def_stmt))
2d698d3b
RG
2488 {
2489 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2490 gimple_assign_set_rhs_code (stmt, code);
2491 rhs2 = gimple_assign_rhs1 (def_stmt);
2492 gimple_assign_set_rhs2 (stmt, rhs2);
2493 gimple_set_modified (stmt, true);
2494 changed = true;
2495 }
2496 }
2497
2498 /* (-A) + B -> B - A. */
2499 if (TREE_CODE (rhs1) == SSA_NAME
2500 && code == PLUS_EXPR)
2501 {
2502 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2503 if (is_gimple_assign (def_stmt)
931050d0
EB
2504 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2505 && can_propagate_from (def_stmt))
2d698d3b
RG
2506 {
2507 code = MINUS_EXPR;
2508 gimple_assign_set_rhs_code (stmt, code);
2509 rhs1 = rhs2;
2510 gimple_assign_set_rhs1 (stmt, rhs1);
2511 rhs2 = gimple_assign_rhs1 (def_stmt);
2512 gimple_assign_set_rhs2 (stmt, rhs2);
2513 gimple_set_modified (stmt, true);
2514 changed = true;
2515 }
2516 }
2517 }
2518 while (changed);
2519
2520 /* We can't reassociate floating-point or fixed-point plus or minus
2521 because of saturation to +-Inf. */
2522 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2523 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2524 goto out;
2525
2526 /* Second match patterns that allow contracting a plus-minus pair
2527 irrespective of overflow issues.
2528
2529 (A +- B) - A -> +- B
2530 (A +- B) -+ B -> A
2531 (CST +- A) +- CST -> CST +- A
2532 (A +- CST) +- CST -> A +- CST
2533 ~A + A -> -1
2534 ~A + 1 -> -A
2535 A - (A +- B) -> -+ B
2536 A +- (B +- A) -> +- B
2537 CST +- (CST +- A) -> CST +- A
2538 CST +- (A +- CST) -> CST +- A
2539 A + ~A -> -1
2540
2541 via commutating the addition and contracting operations to zero
2542 by reassociation. */
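/* For illustration: given
     t_1 = a_2 + b_3;
     r_4 = t_1 - a_2;
   the second statement becomes  r_4 = b_3;  and  ~a_2 + 1  becomes
   -a_2, regardless of the signed overflow rules.  */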
2543
2544 if (TREE_CODE (rhs1) == SSA_NAME)
2545 {
2546 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
931050d0 2547 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2d698d3b
RG
2548 {
2549 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2550 if (def_code == PLUS_EXPR
2551 || def_code == MINUS_EXPR)
2552 {
2553 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2554 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2555 if (operand_equal_p (def_rhs1, rhs2, 0)
2556 && code == MINUS_EXPR)
2557 {
2558 /* (A +- B) - A -> +- B. */
2559 code = ((def_code == PLUS_EXPR)
2560 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2561 rhs1 = def_rhs2;
2562 rhs2 = NULL_TREE;
59401b92
RG
2563 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2564 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2565 gimple_set_modified (stmt, true);
2566 }
2567 else if (operand_equal_p (def_rhs2, rhs2, 0)
2568 && code != def_code)
2569 {
2570 /* (A +- B) -+ B -> A. */
2571 code = TREE_CODE (def_rhs1);
2572 rhs1 = def_rhs1;
2573 rhs2 = NULL_TREE;
59401b92
RG
2574 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2575 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2576 gimple_set_modified (stmt, true);
2577 }
5d418483
MG
2578 else if (CONSTANT_CLASS_P (rhs2)
2579 && CONSTANT_CLASS_P (def_rhs1))
2d698d3b
RG
2580 {
2581 /* (CST +- A) +- CST -> CST +- A. */
2582 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2583 def_rhs1, rhs2);
2584 if (cst && !TREE_OVERFLOW (cst))
2585 {
2586 code = def_code;
2587 gimple_assign_set_rhs_code (stmt, code);
2588 rhs1 = cst;
2589 gimple_assign_set_rhs1 (stmt, rhs1);
2590 rhs2 = def_rhs2;
2591 gimple_assign_set_rhs2 (stmt, rhs2);
2592 gimple_set_modified (stmt, true);
2593 }
2594 }
5d418483
MG
2595 else if (CONSTANT_CLASS_P (rhs2)
2596 && CONSTANT_CLASS_P (def_rhs2))
2d698d3b 2597 {
5d418483
MG
2598 /* (A +- CST) +- CST -> A +- CST. */
2599 enum tree_code mix = (code == def_code)
2600 ? PLUS_EXPR : MINUS_EXPR;
2601 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
2d698d3b
RG
2602 def_rhs2, rhs2);
2603 if (cst && !TREE_OVERFLOW (cst))
2604 {
5d418483 2605 code = def_code;
2d698d3b
RG
2606 gimple_assign_set_rhs_code (stmt, code);
2607 rhs1 = def_rhs1;
2608 gimple_assign_set_rhs1 (stmt, rhs1);
2609 rhs2 = cst;
2610 gimple_assign_set_rhs2 (stmt, rhs2);
2611 gimple_set_modified (stmt, true);
2612 }
2613 }
2614 }
5d418483 2615 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
2d698d3b
RG
2616 {
2617 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
5d418483 2618 if (operand_equal_p (def_rhs1, rhs2, 0))
2d698d3b
RG
2619 {
2620 /* ~A + A -> -1. */
5d418483 2621 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
2d698d3b 2622 rhs2 = NULL_TREE;
5d418483 2623 code = TREE_CODE (rhs1);
59401b92
RG
2624 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2625 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2626 gimple_set_modified (stmt, true);
2627 }
5d418483
MG
2628 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2629 && integer_onep (rhs2))
2630 || (TREE_CODE (rhs2) == COMPLEX_CST
2631 && integer_onep (TREE_REALPART (rhs2))
2632 && integer_onep (TREE_IMAGPART (rhs2))))
2d698d3b
RG
2633 {
2634 /* ~A + 1 -> -A. */
2635 code = NEGATE_EXPR;
2636 rhs1 = def_rhs1;
2637 rhs2 = NULL_TREE;
59401b92
RG
2638 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2639 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2640 gimple_set_modified (stmt, true);
2641 }
2642 }
2643 }
2644 }
2645
2646 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2647 {
2648 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
931050d0 2649 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2d698d3b
RG
2650 {
2651 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2652 if (def_code == PLUS_EXPR
2653 || def_code == MINUS_EXPR)
2654 {
2655 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2656 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2657 if (operand_equal_p (def_rhs1, rhs1, 0)
2658 && code == MINUS_EXPR)
2659 {
2660 /* A - (A +- B) -> -+ B. */
2661 code = ((def_code == PLUS_EXPR)
2662 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2663 rhs1 = def_rhs2;
2664 rhs2 = NULL_TREE;
59401b92
RG
2665 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2666 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2667 gimple_set_modified (stmt, true);
2668 }
2669 else if (operand_equal_p (def_rhs2, rhs1, 0)
2670 && code != def_code)
2671 {
2672 /* A +- (B +- A) -> +- B. */
2673 code = ((code == PLUS_EXPR)
2674 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2675 rhs1 = def_rhs1;
2676 rhs2 = NULL_TREE;
59401b92
RG
2677 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2678 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2679 gimple_set_modified (stmt, true);
2680 }
5d418483
MG
2681 else if (CONSTANT_CLASS_P (rhs1)
2682 && CONSTANT_CLASS_P (def_rhs1))
2d698d3b
RG
2683 {
2684 /* CST +- (CST +- A) -> CST +- A. */
2685 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2686 rhs1, def_rhs1);
2687 if (cst && !TREE_OVERFLOW (cst))
2688 {
2689 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2690 gimple_assign_set_rhs_code (stmt, code);
2691 rhs1 = cst;
2692 gimple_assign_set_rhs1 (stmt, rhs1);
2693 rhs2 = def_rhs2;
2694 gimple_assign_set_rhs2 (stmt, rhs2);
2695 gimple_set_modified (stmt, true);
2696 }
2697 }
5d418483
MG
2698 else if (CONSTANT_CLASS_P (rhs1)
2699 && CONSTANT_CLASS_P (def_rhs2))
2d698d3b
RG
2700 {
2701 /* CST +- (A +- CST) -> CST +- A. */
2702 tree cst = fold_binary (def_code == code
2703 ? PLUS_EXPR : MINUS_EXPR,
2704 TREE_TYPE (rhs2),
2705 rhs1, def_rhs2);
2706 if (cst && !TREE_OVERFLOW (cst))
2707 {
2708 rhs1 = cst;
2709 gimple_assign_set_rhs1 (stmt, rhs1);
2710 rhs2 = def_rhs1;
2711 gimple_assign_set_rhs2 (stmt, rhs2);
2712 gimple_set_modified (stmt, true);
2713 }
2714 }
2715 }
5d418483 2716 else if (def_code == BIT_NOT_EXPR)
2d698d3b
RG
2717 {
2718 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2719 if (code == PLUS_EXPR
2720 && operand_equal_p (def_rhs1, rhs1, 0))
2721 {
2722 /* A + ~A -> -1. */
5d418483 2723 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
2d698d3b 2724 rhs2 = NULL_TREE;
5d418483 2725 code = TREE_CODE (rhs1);
59401b92
RG
2726 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2727 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2728 gimple_set_modified (stmt, true);
2729 }
2730 }
2731 }
2732 }
2733
2734out:
2735 if (gimple_modified_p (stmt))
2736 {
59401b92 2737 fold_stmt_inplace (gsi);
2d698d3b 2738 update_stmt (stmt);
0fdb0d27
RG
2739 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2740 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2741 return true;
2d698d3b 2742 }
0fdb0d27
RG
2743
2744 return false;
2d698d3b
RG
2745}
2746
2747/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2748 true if anything changed, false otherwise. */
2749
2750static bool
2751associate_pointerplus (gimple_stmt_iterator *gsi)
2752{
2753 gimple stmt = gsi_stmt (*gsi);
2754 gimple def_stmt;
2755 tree ptr, rhs, algn;
2756
2757 /* Pattern match
2758 tem = (sizetype) ptr;
2759 tem = tem & algn;
2760 tem = -tem;
2761 ... = ptr p+ tem;
2762 and produce the simpler and easier to analyze with respect to alignment
2763 ... = ptr & ~algn; */
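/* For illustration, with algn == 15 (GIMPLE-style names, not from the
   original sources):
     tem_1 = (sizetype) ptr_2;
     tem_3 = tem_1 & 15;
     tem_4 = -tem_3;
     res_5 = ptr_2 p+ tem_4;
   res_5 is then computed as  ptr_2 & ~15,  i.e. ptr_2 rounded down
   to a 16-byte boundary.  */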
2764 ptr = gimple_assign_rhs1 (stmt);
2765 rhs = gimple_assign_rhs2 (stmt);
2766 if (TREE_CODE (rhs) != SSA_NAME)
2767 return false;
2768 def_stmt = SSA_NAME_DEF_STMT (rhs);
2769 if (!is_gimple_assign (def_stmt)
2770 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2771 return false;
2772 rhs = gimple_assign_rhs1 (def_stmt);
2773 if (TREE_CODE (rhs) != SSA_NAME)
2774 return false;
2775 def_stmt = SSA_NAME_DEF_STMT (rhs);
2776 if (!is_gimple_assign (def_stmt)
2777 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2778 return false;
2779 rhs = gimple_assign_rhs1 (def_stmt);
2780 algn = gimple_assign_rhs2 (def_stmt);
2781 if (TREE_CODE (rhs) != SSA_NAME
2782 || TREE_CODE (algn) != INTEGER_CST)
2783 return false;
2784 def_stmt = SSA_NAME_DEF_STMT (rhs);
2785 if (!is_gimple_assign (def_stmt)
2786 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2787 return false;
2788 if (gimple_assign_rhs1 (def_stmt) != ptr)
2789 return false;
2790
27bcd47c 2791 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
a8ab21e5
RG
2792 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2793 fold_stmt_inplace (gsi);
2794 update_stmt (stmt);
2795
2796 return true;
2797}
2798
2799/* Combine two conversions in a row for the second conversion at *GSI.
2800 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2801 run. Else it returns 0. */
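/* For illustration: with  unsigned char c;  the two conversions in
     (int) (unsigned int) c
   collapse, because the intermediate widening to unsigned int is wider
   than the source and does not change the signedness, so c is
   converted directly to int.  */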
be173289 2802
91bc6112 2803static int
be173289
RG
2804combine_conversions (gimple_stmt_iterator *gsi)
2805{
2806 gimple stmt = gsi_stmt (*gsi);
2807 gimple def_stmt;
2808 tree op0, lhs;
2809 enum tree_code code = gimple_assign_rhs_code (stmt);
07ab2b1b 2810 enum tree_code code2;
be173289
RG
2811
2812 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2813 || code == FLOAT_EXPR
2814 || code == FIX_TRUNC_EXPR);
2815
2816 lhs = gimple_assign_lhs (stmt);
2817 op0 = gimple_assign_rhs1 (stmt);
2818 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2819 {
2820 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
91bc6112 2821 return 1;
be173289
RG
2822 }
2823
2824 if (TREE_CODE (op0) != SSA_NAME)
91bc6112 2825 return 0;
be173289
RG
2826
2827 def_stmt = SSA_NAME_DEF_STMT (op0);
2828 if (!is_gimple_assign (def_stmt))
91bc6112 2829 return 0;
be173289 2830
07ab2b1b
MG
2831 code2 = gimple_assign_rhs_code (def_stmt);
2832
2833 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
be173289
RG
2834 {
2835 tree defop0 = gimple_assign_rhs1 (def_stmt);
2836 tree type = TREE_TYPE (lhs);
2837 tree inside_type = TREE_TYPE (defop0);
2838 tree inter_type = TREE_TYPE (op0);
2839 int inside_int = INTEGRAL_TYPE_P (inside_type);
2840 int inside_ptr = POINTER_TYPE_P (inside_type);
2841 int inside_float = FLOAT_TYPE_P (inside_type);
2842 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2843 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2844 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2845 int inter_int = INTEGRAL_TYPE_P (inter_type);
2846 int inter_ptr = POINTER_TYPE_P (inter_type);
2847 int inter_float = FLOAT_TYPE_P (inter_type);
2848 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2849 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2850 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2851 int final_int = INTEGRAL_TYPE_P (type);
2852 int final_ptr = POINTER_TYPE_P (type);
2853 int final_float = FLOAT_TYPE_P (type);
2854 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2855 unsigned int final_prec = TYPE_PRECISION (type);
2856 int final_unsignedp = TYPE_UNSIGNED (type);
2857
9402220c
EB
2858 /* Don't propagate ssa names that occur in abnormal phis. */
2859 if (TREE_CODE (defop0) == SSA_NAME
2860 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2861 return 0;
2862
be173289
RG
2863 /* In addition to the cases of two conversions in a row
2864 handled below, if we are converting something to its own
2865 type via an object of identical or wider precision, neither
2866 conversion is needed. */
2867 if (useless_type_conversion_p (type, inside_type)
2868 && (((inter_int || inter_ptr) && final_int)
2869 || (inter_float && final_float))
2870 && inter_prec >= final_prec)
2871 {
2872 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2873 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2874 update_stmt (stmt);
91bc6112 2875 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2876 }
2877
2878 /* Likewise, if the intermediate and initial types are either both
2879 float or both integer, we don't need the middle conversion if the
2880 former is wider than the latter and doesn't change the signedness
2881 (for integers). Avoid this if the final type is a pointer since
2882 then we sometimes need the middle conversion. Likewise if the
2883 final type has a precision not equal to the size of its mode. */
2884 if (((inter_int && inside_int)
2885 || (inter_float && inside_float)
2886 || (inter_vec && inside_vec))
2887 && inter_prec >= inside_prec
2888 && (inter_float || inter_vec
2889 || inter_unsignedp == inside_unsignedp)
d3ea1dbd 2890 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
be173289
RG
2891 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2892 && ! final_ptr
2893 && (! final_vec || inter_prec == inside_prec))
2894 {
2895 gimple_assign_set_rhs1 (stmt, defop0);
2896 update_stmt (stmt);
91bc6112 2897 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2898 }
2899
2900 /* If we have a sign-extension of a zero-extended value, we can
1caf8dd6
RG
2901 replace that by a single zero-extension. Likewise if the
2902 final conversion does not change precision we can drop the
2903 intermediate conversion. */
be173289 2904 if (inside_int && inter_int && final_int
1caf8dd6
RG
2905 && ((inside_prec < inter_prec && inter_prec < final_prec
2906 && inside_unsignedp && !inter_unsignedp)
2907 || final_prec == inter_prec))
be173289
RG
2908 {
2909 gimple_assign_set_rhs1 (stmt, defop0);
2910 update_stmt (stmt);
91bc6112 2911 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2912 }
2913
2914 /* Two conversions in a row are not needed unless:
2915 - some conversion is floating-point (overstrict for now), or
2916 - some conversion is a vector (overstrict for now), or
2917 - the intermediate type is narrower than both initial and
2918 final, or
2919 - the intermediate type and innermost type differ in signedness,
2920 and the outermost type is wider than the intermediate, or
2921 - the initial type is a pointer type and the precisions of the
2922 intermediate and final types differ, or
2923 - the final type is a pointer type and the precisions of the
2924 initial and intermediate types differ. */
2925 if (! inside_float && ! inter_float && ! final_float
2926 && ! inside_vec && ! inter_vec && ! final_vec
2927 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2928 && ! (inside_int && inter_int
2929 && inter_unsignedp != inside_unsignedp
2930 && inter_prec < final_prec)
2931 && ((inter_unsignedp && inter_prec > inside_prec)
2932 == (final_unsignedp && final_prec > inter_prec))
2933 && ! (inside_ptr && inter_prec != final_prec)
2934 && ! (final_ptr && inside_prec != inter_prec)
d3ea1dbd 2935 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
be173289
RG
2936 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2937 {
2938 gimple_assign_set_rhs1 (stmt, defop0);
2939 update_stmt (stmt);
91bc6112 2940 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2941 }
2942
2943 /* A truncation to an unsigned type should be canonicalized as
2944 bitwise and of a mask. */
2945 if (final_int && inter_int && inside_int
2946 && final_prec == inside_prec
2947 && final_prec > inter_prec
2948 && inter_unsignedp)
2949 {
2950 tree tem;
2951 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2952 defop0,
2953 double_int_to_tree
27bcd47c 2954 (inside_type, double_int::mask (inter_prec)));
be173289
RG
2955 if (!useless_type_conversion_p (type, inside_type))
2956 {
2957 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2958 GSI_SAME_STMT);
2959 gimple_assign_set_rhs1 (stmt, tem);
2960 }
2961 else
2962 gimple_assign_set_rhs_from_tree (gsi, tem);
2963 update_stmt (gsi_stmt (*gsi));
91bc6112 2964 return 1;
be173289 2965 }
07ab2b1b
MG
2966
2967 /* If we are converting an integer to a floating-point that can
2968 represent it exactly and back to an integer, we can skip the
2969 floating-point conversion. */
2970 if (inside_int && inter_float && final_int &&
2971 (unsigned) significand_size (TYPE_MODE (inter_type))
2972 >= inside_prec - !inside_unsignedp)
2973 {
2974 if (useless_type_conversion_p (type, inside_type))
2975 {
2976 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2977 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2978 update_stmt (stmt);
2979 return remove_prop_source_from_use (op0) ? 2 : 1;
2980 }
2981 else
2982 {
2983 gimple_assign_set_rhs1 (stmt, defop0);
2984 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2985 update_stmt (stmt);
2986 return remove_prop_source_from_use (op0) ? 2 : 1;
2987 }
2988 }
be173289
RG
2989 }
2990
91bc6112 2991 return 0;
be173289
RG
2992}
2993
2994/* Combine an element access with a shuffle. Returns true if there were
2995 any changes made, else it returns false. */
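/* For illustration, assuming 32-bit vector elements:
     v_1 = VEC_PERM_EXPR <a_2, b_3, { 3, 0, 1, 2 }>;
     x_4 = BIT_FIELD_REF <v_1, 32, 0>;
   the element access is redirected to the shuffle source:
     x_4 = BIT_FIELD_REF <a_2, 32, 96>;  */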
2996
2997static bool
2998simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2999{
3000 gimple stmt = gsi_stmt (*gsi);
3001 gimple def_stmt;
3002 tree op, op0, op1, op2;
3003 tree elem_type;
3004 unsigned idx, n, size;
3005 enum tree_code code;
3006
3007 op = gimple_assign_rhs1 (stmt);
3008 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3009
3010 op0 = TREE_OPERAND (op, 0);
3011 if (TREE_CODE (op0) != SSA_NAME
3012 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3013 return false;
3014
f2167d68
MG
3015 def_stmt = get_prop_source_stmt (op0, false, NULL);
3016 if (!def_stmt || !can_propagate_from (def_stmt))
3017 return false;
3018
3019 op1 = TREE_OPERAND (op, 1);
3020 op2 = TREE_OPERAND (op, 2);
3021 code = gimple_assign_rhs_code (def_stmt);
3022
3023 if (code == CONSTRUCTOR)
3024 {
3025 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3026 gimple_assign_rhs1 (def_stmt), op1, op2);
3027 if (!tem || !valid_gimple_rhs_p (tem))
3028 return false;
3029 gimple_assign_set_rhs_from_tree (gsi, tem);
3030 update_stmt (gsi_stmt (*gsi));
3031 return true;
3032 }
3033
881a9dcd
MG
3034 elem_type = TREE_TYPE (TREE_TYPE (op0));
3035 if (TREE_TYPE (op) != elem_type)
3036 return false;
3037
3038 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3039 n = TREE_INT_CST_LOW (op1) / size;
3040 if (n != 1)
3041 return false;
3042 idx = TREE_INT_CST_LOW (op2) / size;
3043
3044 if (code == VEC_PERM_EXPR)
3045 {
3046 tree p, m, index, tem;
3047 unsigned nelts;
3048 m = gimple_assign_rhs3 (def_stmt);
3049 if (TREE_CODE (m) != VECTOR_CST)
3050 return false;
3051 nelts = VECTOR_CST_NELTS (m);
3052 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3053 idx %= 2 * nelts;
3054 if (idx < nelts)
3055 {
3056 p = gimple_assign_rhs1 (def_stmt);
3057 }
3058 else
3059 {
3060 p = gimple_assign_rhs2 (def_stmt);
3061 idx -= nelts;
3062 }
3063 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3064 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3ebd25e1 3065 unshare_expr (p), op1, index);
881a9dcd
MG
3066 gimple_assign_set_rhs1 (stmt, tem);
3067 fold_stmt (gsi);
3068 update_stmt (gsi_stmt (*gsi));
3069 return true;
3070 }
3071
3072 return false;
3073}
3074
3075/* Determine whether applying the two permutations (mask1 then mask2)
3076 gives back one of the inputs. */
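/* For illustration, with two-element vectors: composing the mask
   { 1, 0 } with itself selects elements { 0, 1 } of the first input,
   so the function returns 1 and the caller can replace the pair of
   shuffles by the first shuffle's first operand.  */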
3077
3078static int
3079is_combined_permutation_identity (tree mask1, tree mask2)
3080{
3081 tree mask;
3082 unsigned int nelts, i, j;
3083 bool maybe_identity1 = true;
3084 bool maybe_identity2 = true;
3085
3086 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3087 && TREE_CODE (mask2) == VECTOR_CST);
3088 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3089 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3090
3091 nelts = VECTOR_CST_NELTS (mask);
3092 for (i = 0; i < nelts; i++)
3093 {
3094 tree val = VECTOR_CST_ELT (mask, i);
3095 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3096 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3097 if (j == i)
3098 maybe_identity2 = false;
3099 else if (j == i + nelts)
3100 maybe_identity1 = false;
3101 else
3102 return 0;
3103 }
3104 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3105}
3106
3107/* Combine a shuffle with its arguments. Returns 1 if there were any
3108 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
3109
3110static int
3111simplify_permutation (gimple_stmt_iterator *gsi)
3112{
3113 gimple stmt = gsi_stmt (*gsi);
3114 gimple def_stmt;
84c3c7ce
MG
3115 tree op0, op1, op2, op3, arg0, arg1;
3116 enum tree_code code;
3ebd25e1 3117 bool single_use_op0 = false;
8a3ffc5d 3118
84c3c7ce 3119 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
8a3ffc5d
MG
3120
3121 op0 = gimple_assign_rhs1 (stmt);
3122 op1 = gimple_assign_rhs2 (stmt);
3123 op2 = gimple_assign_rhs3 (stmt);
3124
8a3ffc5d
MG
3125 if (TREE_CODE (op2) != VECTOR_CST)
3126 return 0;
3127
84c3c7ce
MG
3128 if (TREE_CODE (op0) == VECTOR_CST)
3129 {
3130 code = VECTOR_CST;
3131 arg0 = op0;
3132 }
3133 else if (TREE_CODE (op0) == SSA_NAME)
3134 {
3ebd25e1
MG
3135 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3136 if (!def_stmt || !can_propagate_from (def_stmt))
84c3c7ce 3137 return 0;
8a3ffc5d 3138
84c3c7ce
MG
3139 code = gimple_assign_rhs_code (def_stmt);
3140 arg0 = gimple_assign_rhs1 (def_stmt);
3141 }
3142 else
8a3ffc5d
MG
3143 return 0;
3144
8a3ffc5d 3145 /* Two consecutive shuffles. */
84c3c7ce 3146 if (code == VEC_PERM_EXPR)
8a3ffc5d
MG
3147 {
3148 tree orig;
3149 int ident;
84c3c7ce
MG
3150
3151 if (op0 != op1)
3152 return 0;
8a3ffc5d
MG
3153 op3 = gimple_assign_rhs3 (def_stmt);
3154 if (TREE_CODE (op3) != VECTOR_CST)
3155 return 0;
3156 ident = is_combined_permutation_identity (op3, op2);
3157 if (!ident)
3158 return 0;
3159 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3160 : gimple_assign_rhs2 (def_stmt);
3161 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3162 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3163 gimple_set_num_ops (stmt, 2);
3164 update_stmt (stmt);
3165 return remove_prop_source_from_use (op0) ? 2 : 1;
3166 }
3167
84c3c7ce
MG
3168 /* Shuffle of a constructor. */
3169 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3170 {
3171 tree opt;
3172 bool ret = false;
3173 if (op0 != op1)
3174 {
3ebd25e1 3175 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
84c3c7ce
MG
3176 return 0;
3177
3178 if (TREE_CODE (op1) == VECTOR_CST)
3179 arg1 = op1;
3180 else if (TREE_CODE (op1) == SSA_NAME)
3181 {
3182 enum tree_code code2;
3183
3ebd25e1
MG
3184 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3185 if (!def_stmt2 || !can_propagate_from (def_stmt2))
84c3c7ce
MG
3186 return 0;
3187
3188 code2 = gimple_assign_rhs_code (def_stmt2);
3189 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3190 return 0;
3191 arg1 = gimple_assign_rhs1 (def_stmt2);
3192 }
3193 else
3194 return 0;
3195 }
3196 else
3197 {
3198 /* Already used twice in this statement. */
3199 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3200 return 0;
3201 arg1 = arg0;
3202 }
c3284718 3203 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
84c3c7ce 3204 if (!opt
c3284718 3205 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
84c3c7ce
MG
3206 return 0;
3207 gimple_assign_set_rhs_from_tree (gsi, opt);
3208 update_stmt (gsi_stmt (*gsi));
3209 if (TREE_CODE (op0) == SSA_NAME)
3210 ret = remove_prop_source_from_use (op0);
3211 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3212 ret |= remove_prop_source_from_use (op1);
3213 return ret ? 2 : 1;
3214 }
3215
3216 return 0;
8a3ffc5d
MG
3217}
3218
3219/* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
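/* For illustration, assuming 32-bit elements and target support for
   the resulting mask:
     x_2 = BIT_FIELD_REF <v_1, 32, 32>;
     y_3 = BIT_FIELD_REF <v_1, 32, 0>;
     r_4 = { x_2, y_3, x_2, y_3 };
   is rewritten as
     r_4 = VEC_PERM_EXPR <v_1, v_1, { 1, 0, 1, 0 }>;  */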

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
        maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
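          /* For instance (an illustrative sketch):

               ptr_1 = &a.b;
               x_2 = *ptr_1;

             forward_propagate_addr_expr may rewrite the load as x_2 = a.b;,
             after which ptr_1 becomes dead and its definition can be
             removed below.  */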
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs, true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
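          /* An illustrative sketch of the POINTER_PLUS_EXPR case below:

               ptr_2 = &a p+ 4;

             is handled by building the equivalent address
             &MEM[(void *)&a + 4B] and propagating that into the uses of
             ptr_2, just as in the ADDR_EXPR case above.  */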
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off))), true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              if (forward_propagate_comparison (&gsi))
                cfg_changed = true;
            }
          else
            gsi_next (&gsi);
        }

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

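                /* E.g. (an illustrative sketch):

                     tem_2 = ~x_1;
                     y_3 = ~tem_2;

                   simplify_not_neg_expr is expected to turn the second
                   statement into the copy y_3 = x_1; the NEGATE_EXPR case
                   is the analogous -(-x) cancellation.  */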
                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR
                         || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1.  */
                    if (forward_propagate_into_cond (&gsi)
                        || combine_cond_exprs (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
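                /* An illustrative sketch of what simplify_rotate below is
                   expected to recognize: on a 32-bit unsigned type

                     t1_2 = x_1 << 8;
                     t2_3 = x_1 >> 24;
                     r_4 = t1_2 | t2_3;

                   the last statement can be rewritten as the rotate
                   r_4 = x_1 r<< 8;  */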
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (&gsi);
                else if (code == POINTER_PLUS_EXPR)
                  changed = associate_pointerplus (&gsi);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;

                    /* If we have a narrowing conversion to an integral
                       type that is fed by a BIT_AND_EXPR, we might be
                       able to remove the BIT_AND_EXPR if it merely
                       masks off bits outside the final type (and nothing
                       else).  */
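                    /* E.g. (an illustrative sketch):

                         tem_2 = x_1 & 255;
                         c_3 = (unsigned char) tem_2;

                       the mask only clears bits that the narrowing to
                       unsigned char discards anyway, so the conversion can
                       read from x_1 directly and the BIT_AND_EXPR is
                       redundant.  */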
                    if (! did_something)
                      {
                        tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
                        tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
                        if (INTEGRAL_TYPE_P (outer_type)
                            && INTEGRAL_TYPE_P (inner_type)
                            && (TYPE_PRECISION (outer_type)
                                <= TYPE_PRECISION (inner_type)))
                          did_something = simplify_conversion_from_bitmask (&gsi);
                      }

                    changed = did_something != 0;
                  }
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}


static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}
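
/* The pass manager creates this pass through make_pass_forwprop above;
   its position in the pipeline is assumed to come from an entry in
   passes.def along the lines of

     NEXT_PASS (pass_forwprop);

   (an illustrative sketch; see passes.def for the actual placement).  */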