/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  FINAL_NAME_P
   if non-NULL is written to the ssa name that represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
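
/* For example (a sketch), given the single-use chain

     b_2 = a_1;
     c_3 = b_2;
     if (c_3 != 0) goto ...; else goto ...;

   get_prop_dest_stmt (a_1, &n) skips the two copies, sets n to c_3 and
   returns the GIMPLE_COND statement.  */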

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
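
/* For example (a sketch), with

     a_1 = b_2 + 1;
     c_3 = a_1;

   get_prop_source_stmt (c_3, false, &su) skips the copy and returns the
   statement a_1 = b_2 + 1, recording in *su whether every link in the
   chain had a single use.  */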

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
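
/* A sketch of what the above enables, mirroring the file header
   (assuming fold can combine the addition, e.g. for EQ_EXPR/NE_EXPR):

     a_1 = b_2 + 3;
     x_3 = a_1 == 7;       combines into       x_3 = b_2 == 4;

   because fold_binary_loc can fold (b_2 + 3) == 7.  */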

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
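
/* A sketch of the GIMPLE_COND case, as documented in the file header:

     x_1 = a_2 == b_3;
     if (x_1 != 0) goto ...; else goto ...;

   becomes

     if (a_2 == b_3) goto ...; else goto ...;  */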

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return 0;
}
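
/* A sketch of the arm-swapping case above (1-bit operands assumed):

     x_1 = ~c_2;
     y_3 = x_1 ? a_4 : b_5;     becomes     y_3 = c_2 ? b_5 : a_4;  */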

/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
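
/* For instance (a sketch), with

     y_2 = c_1 ? a_3 : b_4;
     x_5 = c_1 ? y_2 : d_6;

   the second COND_EXPR has y_2 replaced by a_3; if both arms then become
   equal, the COND_EXPR collapses into a plain copy.  */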

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
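
/* For the pointer-arithmetic case this implements the transformation
   documented in the file header, e.g. (a sketch, assuming 4-byte array
   elements):

     ptr_1 = &x[0];
     ptr_2 = ptr_1 p+ 8;      becomes      ptr_2 = &x[2];  */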

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}


/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
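
/* A sketch of the negation case handled above:

     cond_1 = x_2 < y_3;
     a_4 = ~cond_1;        (or a_4 = cond_1 ^ 1)

   becomes a_4 = x_2 >= y_3, provided the comparison is invertible
   (e.g. no NaNs are involved).  */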

/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}


/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
d090221b 1271
68e72840
SB

/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
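
/* A sketch of the cast removal above:

     unsigned char c_1 = ...;
     int i_2 = (int) c_1;
     switch (i_2) <...>

   becomes switch (c_1) with the case labels narrowed to the type of c_1,
   since the widening conversion preserves every value.  */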

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
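
/* For example (a sketch), with

     p1_1 = &buf[0];
     p2_2 = p1_1 p+ 4;
     p2_3 = p2_2 p+ 8;

   constant_pointer_difference (p1_1, p2_3) returns 12, because both
   chains bottom out at &buf with offsets 0 and 12.  */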
1488
1489/* *GSI_P is a GIMPLE_CALL to a builtin function.
1490 Optimize
1491 memcpy (p, "abcd", 4);
1492 memset (p + 4, ' ', 3);
1493 into
1494 memcpy (p, "abcd ", 7);
1495 call if the latter can be stored by pieces during expansion. */
1496
1497static bool
1498simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1499{
1500 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1501 tree vuse = gimple_vuse (stmt2);
1502 if (vuse == NULL)
1503 return false;
1504 stmt1 = SSA_NAME_DEF_STMT (vuse);
1505
1506 switch (DECL_FUNCTION_CODE (callee2))
1507 {
1508 case BUILT_IN_MEMSET:
1509 if (gimple_call_num_args (stmt2) != 3
1510 || gimple_call_lhs (stmt2)
1511 || CHAR_BIT != 8
1512 || BITS_PER_UNIT != 8)
1513 break;
1514 else
1515 {
1516 tree callee1;
1517 tree ptr1, src1, str1, off1, len1, lhs1;
1518 tree ptr2 = gimple_call_arg (stmt2, 0);
1519 tree val2 = gimple_call_arg (stmt2, 1);
1520 tree len2 = gimple_call_arg (stmt2, 2);
1521 tree diff, vdef, new_str_cst;
1522 gimple use_stmt;
1523 unsigned int ptr1_align;
1524 unsigned HOST_WIDE_INT src_len;
1525 char *src_buf;
1526 use_operand_p use_p;
1527
1528 if (!host_integerp (val2, 0)
1529 || !host_integerp (len2, 1))
1530 break;
1531 if (is_gimple_call (stmt1))
1532 {
1533 /* If first stmt is a call, it needs to be memcpy
1534 or mempcpy, with string literal as second argument and
1535 constant length. */
1536 callee1 = gimple_call_fndecl (stmt1);
1537 if (callee1 == NULL_TREE
1538 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1539 || gimple_call_num_args (stmt1) != 3)
1540 break;
1541 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1542 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1543 break;
1544 ptr1 = gimple_call_arg (stmt1, 0);
1545 src1 = gimple_call_arg (stmt1, 1);
1546 len1 = gimple_call_arg (stmt1, 2);
1547 lhs1 = gimple_call_lhs (stmt1);
1548 if (!host_integerp (len1, 1))
1549 break;
1550 str1 = string_constant (src1, &off1);
1551 if (str1 == NULL_TREE)
1552 break;
1553 if (!host_integerp (off1, 1)
1554 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1555 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1556 - tree_low_cst (off1, 1)) > 0
1557 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1558 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1559 != TYPE_MODE (char_type_node))
1560 break;
1561 }
1562 else if (gimple_assign_single_p (stmt1))
1563 {
1564 /* Otherwise look for length 1 memcpy optimized into
1565 assignment. */
1566 ptr1 = gimple_assign_lhs (stmt1);
1567 src1 = gimple_assign_rhs1 (stmt1);
1568 if (TREE_CODE (ptr1) != MEM_REF
1569 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1570 || !host_integerp (src1, 0))
1571 break;
1572 ptr1 = build_fold_addr_expr (ptr1);
1573 callee1 = NULL_TREE;
1574 len1 = size_one_node;
1575 lhs1 = NULL_TREE;
1576 off1 = size_zero_node;
1577 str1 = NULL_TREE;
1578 }
1579 else
1580 break;
1581
1582 diff = constant_pointer_difference (ptr1, ptr2);
1583 if (diff == NULL && lhs1 != NULL)
1584 {
1585 diff = constant_pointer_difference (lhs1, ptr2);
1586 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1587 && diff != NULL)
1588 diff = size_binop (PLUS_EXPR, diff,
1589 fold_convert (sizetype, len1));
1590 }
1591 /* If the difference between the second and first destination pointer
1592 is not constant, or is bigger than memcpy length, bail out. */
1593 if (diff == NULL
1594 || !host_integerp (diff, 1)
1595 || tree_int_cst_lt (len1, diff))
1596 break;
1597
1598 /* Use maximum of difference plus memset length and memcpy length
 1599 as the new memcpy length; if it is too big, bail out. */
1600 src_len = tree_low_cst (diff, 1);
1601 src_len += tree_low_cst (len2, 1);
1602 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1603 src_len = tree_low_cst (len1, 1);
1604 if (src_len > 1024)
1605 break;
1606
1607 /* If mempcpy value is used elsewhere, bail out, as mempcpy
 1608 with bigger length will return a different result. */
1609 if (lhs1 != NULL_TREE
1610 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1611 && (TREE_CODE (lhs1) != SSA_NAME
1612 || !single_imm_use (lhs1, &use_p, &use_stmt)
1613 || use_stmt != stmt2))
1614 break;
1615
 1616 /* If anything reads memory in between the memcpy and memset
 1617 calls, the modified memcpy call might change it. */
1618 vdef = gimple_vdef (stmt1);
1619 if (vdef != NULL
1620 && (!single_imm_use (vdef, &use_p, &use_stmt)
1621 || use_stmt != stmt2))
1622 break;
1623
0eb77834 1624 ptr1_align = get_pointer_alignment (ptr1);
f4684242
JJ
1625 /* Construct the new source string literal. */
1626 src_buf = XALLOCAVEC (char, src_len + 1);
1627 if (callee1)
1628 memcpy (src_buf,
1629 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1630 tree_low_cst (len1, 1));
1631 else
1632 src_buf[0] = tree_low_cst (src1, 0);
1633 memset (src_buf + tree_low_cst (diff, 1),
081db960 1634 tree_low_cst (val2, 0), tree_low_cst (len2, 1));
f4684242
JJ
1635 src_buf[src_len] = '\0';
1636 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1637 handle embedded '\0's. */
1638 if (strlen (src_buf) != src_len)
1639 break;
1640 rtl_profile_for_bb (gimple_bb (stmt2));
1641 /* If the new memcpy wouldn't be emitted by storing the literal
1642 by pieces, this optimization might enlarge .rodata too much,
1643 as commonly used string literals couldn't be shared any
1644 longer. */
1645 if (!can_store_by_pieces (src_len,
1646 builtin_strncpy_read_str,
1647 src_buf, ptr1_align, false))
1648 break;
1649
1650 new_str_cst = build_string_literal (src_len, src_buf);
1651 if (callee1)
1652 {
1653 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1654 memset call. */
1655 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1656 gimple_call_set_lhs (stmt1, NULL_TREE);
1657 gimple_call_set_arg (stmt1, 1, new_str_cst);
1658 gimple_call_set_arg (stmt1, 2,
1659 build_int_cst (TREE_TYPE (len1), src_len));
1660 update_stmt (stmt1);
1661 unlink_stmt_vdef (stmt2);
1662 gsi_remove (gsi_p, true);
1663 release_defs (stmt2);
1664 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1665 release_ssa_name (lhs1);
1666 return true;
1667 }
1668 else
1669 {
1670 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1671 assignment, remove STMT1 and change memset call into
1672 memcpy call. */
1673 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1674
7a4f257d
JJ
1675 if (!is_gimple_val (ptr1))
1676 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1677 true, GSI_SAME_STMT);
e79983f4
MM
1678 gimple_call_set_fndecl (stmt2,
1679 builtin_decl_explicit (BUILT_IN_MEMCPY));
f4684242
JJ
1680 gimple_call_set_arg (stmt2, 0, ptr1);
1681 gimple_call_set_arg (stmt2, 1, new_str_cst);
1682 gimple_call_set_arg (stmt2, 2,
1683 build_int_cst (TREE_TYPE (len2), src_len));
1684 unlink_stmt_vdef (stmt1);
1685 gsi_remove (&gsi, true);
1686 release_defs (stmt1);
1687 update_stmt (stmt2);
1688 return false;
1689 }
1690 }
1691 break;
1692 default:
1693 break;
1694 }
1695 return false;
1696}
1697
0816a42a
KT
 1698/* Checks if the expression has a type of one-bit precision, or is a known
 1699 truth-valued expression. */
1700static bool
1701truth_valued_ssa_name (tree name)
1702{
1703 gimple def;
1704 tree type = TREE_TYPE (name);
1705
1706 if (!INTEGRAL_TYPE_P (type))
1707 return false;
1708 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1709 necessarily one and so ~X is not equal to !X. */
1710 if (TYPE_PRECISION (type) == 1)
1711 return true;
1712 def = SSA_NAME_DEF_STMT (name);
1713 if (is_gimple_assign (def))
1714 return truth_value_p (gimple_assign_rhs_code (def));
1715 return false;
1716}
1717
1718/* Helper routine for simplify_bitwise_binary_1 function.
 1719 Return for the SSA name NAME the expression X if it meets the condition
1720 NAME = !X. Otherwise return NULL_TREE.
1721 Detected patterns for NAME = !X are:
 1722 !X and X == 0 for X with an integral type.
 1723 X ^ 1, X != 1, or ~X for X with an integral type of precision one. */
1724static tree
1725lookup_logical_inverted_value (tree name)
1726{
1727 tree op1, op2;
1728 enum tree_code code;
1729 gimple def;
1730
 1731 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1732 return. */
1733 if (TREE_CODE (name) != SSA_NAME
1734 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1735 return NULL_TREE;
1736 def = SSA_NAME_DEF_STMT (name);
1737 if (!is_gimple_assign (def))
1738 return NULL_TREE;
1739
1740 code = gimple_assign_rhs_code (def);
1741 op1 = gimple_assign_rhs1 (def);
1742 op2 = NULL_TREE;
1743
 1744 /* For an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR operation get the second operand.
3046b1a9 1745 If CODE isn't one of those or BIT_NOT_EXPR, then return. */
0816a42a
KT
1746 if (code == EQ_EXPR || code == NE_EXPR
1747 || code == BIT_XOR_EXPR)
1748 op2 = gimple_assign_rhs2 (def);
1749
1750 switch (code)
1751 {
0816a42a
KT
1752 case BIT_NOT_EXPR:
1753 if (truth_valued_ssa_name (name))
1754 return op1;
1755 break;
1756 case EQ_EXPR:
1757 /* Check if we have X == 0 and X has an integral type. */
1758 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1759 break;
1760 if (integer_zerop (op2))
1761 return op1;
1762 break;
1763 case NE_EXPR:
 1764 /* Check if we have X != 1 and X is truth-valued. */
1765 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1766 break;
1767 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1768 return op1;
1769 break;
1770 case BIT_XOR_EXPR:
1771 /* Check if we have X ^ 1 and X is truth valued. */
1772 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1773 return op1;
1774 break;
1775 default:
1776 break;
1777 }
1778
1779 return NULL_TREE;
1780}
1781
1782/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1783 operations CODE, if one operand has the logically inverted
1784 value of the other. */
1785static tree
1786simplify_bitwise_binary_1 (enum tree_code code, tree type,
1787 tree arg1, tree arg2)
1788{
1789 tree anot;
1790
1791 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1792 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1793 && code != BIT_XOR_EXPR)
1794 return NULL_TREE;
1795
1796 /* First check if operands ARG1 and ARG2 are equal. If so
 1797 return NULL_TREE as this optimization is handled by fold_stmt. */
1798 if (arg1 == arg2)
1799 return NULL_TREE;
1800 /* See if we have in arguments logical-not patterns. */
1801 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1802 || anot != arg2)
1803 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1804 || anot != arg1))
1805 return NULL_TREE;
1806
1807 /* X & !X -> 0. */
1808 if (code == BIT_AND_EXPR)
1809 return fold_convert (type, integer_zero_node);
1810 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1811 if (truth_valued_ssa_name (anot))
1812 return fold_convert (type, integer_one_node);
1813
 1814 /* ??? Otherwise the result is (X != 0 ? X : 1). Not handled. */
1815 return NULL_TREE;
1816}
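
/* For instance (a hypothetical GIMPLE sketch with one-bit unsigned
   temporaries):
     y_2 = x_1 == 0;
     z_3 = x_1 & y_2;
   Here y_2 is recognized as the logical inverse of x_1, so z_3 is folded
   to 0; with | or ^ instead of & the result would fold to 1 because x_1
   is truth-valued.  */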
1817
a1e179f5
AP
 1818/* Given an SSA_NAME in NAME, see if it was defined by an assignment; if so,
 1819 set CODE to the code on the rhs, ARG1 to its first operand and ARG2 to
 1820 its second operand. */
1821
1822static inline void
1823defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1824{
1825 gimple def;
1826 enum tree_code code1;
1827 tree arg11;
1828 tree arg21;
1829 tree arg31;
1830 enum gimple_rhs_class grhs_class;
1831
1832 code1 = TREE_CODE (name);
1833 arg11 = name;
1834 arg21 = NULL_TREE;
1835 grhs_class = get_gimple_rhs_class (code1);
1836
1837 if (code1 == SSA_NAME)
1838 {
1839 def = SSA_NAME_DEF_STMT (name);
1840
1841 if (def && is_gimple_assign (def)
1842 && can_propagate_from (def))
1843 {
1844 code1 = gimple_assign_rhs_code (def);
1845 arg11 = gimple_assign_rhs1 (def);
1846 arg21 = gimple_assign_rhs2 (def);
 1847 arg31 = gimple_assign_rhs3 (def);
1848 }
1849 }
 1850 else if (grhs_class == GIMPLE_TERNARY_RHS
 1851 || grhs_class == GIMPLE_BINARY_RHS
 1852 || grhs_class == GIMPLE_UNARY_RHS
 1853 || grhs_class == GIMPLE_SINGLE_RHS)
1854 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1855
1856 *code = code1;
1857 *arg1 = arg11;
1858 if (arg2)
1859 *arg2 = arg21;
1860 /* Ignore arg3 currently. */
1861}
1862
259ee451
RB
1863/* Return true if a conversion of an operand from type FROM to type TO
1864 should be applied after performing the operation instead. */
1865
1866static bool
1867hoist_conversion_for_bitop_p (tree to, tree from)
1868{
1869 /* That's a good idea if the conversion widens the operand, thus
1870 after hoisting the conversion the operation will be narrower. */
1871 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1872 return true;
1873
1874 /* It's also a good idea if the conversion is to a non-integer mode. */
1875 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1876 return true;
1877
1878 /* Or if the precision of TO is not the same as the precision
1879 of its mode. */
1880 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1881 return true;
1882
1883 return false;
1884}
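
#if 0
/* Illustrative sketch, not part of the pass; "example_mask" is a
   hypothetical name.  The usual promotion of C to int is hoisted past the
   AND by the (type) X op CST case in simplify_bitwise_binary below, so the
   mask is applied in the narrow unsigned char type first and the widening
   happens afterwards.  */
static int
example_mask (unsigned char c)
{
  return c & 0x0f;	/* Becomes (int) (c & 0x0f) in GIMPLE.  */
}
#endif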
1885
d53e2f99
JL
1886/* GSI points to a statement of the form
1887
1888 result = OP0 CODE OP1
1889
1890 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1891 BIT_AND_EXPR or BIT_IOR_EXPR.
1892
1893 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1894 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1895 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1896
ecdbf306 1897 If a simplification is made, return TRUE, else return FALSE. */
d53e2f99
JL
1898static bool
1899simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1900 enum tree_code code,
1901 tree op0, tree op1)
1902{
1903 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1904
1905 if (!is_gimple_assign (op0_def_stmt)
1906 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1907 return false;
1908
1909 tree x = gimple_assign_rhs1 (op0_def_stmt);
1910 if (TREE_CODE (x) == SSA_NAME
1911 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1912 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1913 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1914 {
1915 enum tree_code newcode;
1916
1917 gimple stmt = gsi_stmt (*gsi);
1918 gimple_assign_set_rhs1 (stmt, x);
1919 gimple_assign_set_rhs2 (stmt, op1);
1920 if (code == BIT_AND_EXPR)
1921 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1922 else
1923 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1924 gimple_assign_set_rhs_code (stmt, newcode);
1925 update_stmt (stmt);
1926 return true;
1927 }
1928 return false;
1929
1930}
1931
10c224a9
RG
1932/* Simplify bitwise binary operations.
 1933 Return true if a transformation was applied, otherwise return false. */
617f3897 1934
10c224a9
RG
1935static bool
1936simplify_bitwise_binary (gimple_stmt_iterator *gsi)
617f3897 1937{
10c224a9 1938 gimple stmt = gsi_stmt (*gsi);
617f3897
MJ
1939 tree arg1 = gimple_assign_rhs1 (stmt);
1940 tree arg2 = gimple_assign_rhs2 (stmt);
10c224a9
RG
1941 enum tree_code code = gimple_assign_rhs_code (stmt);
1942 tree res;
a1e179f5 1943 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
51e020fc 1944 enum tree_code def1_code, def2_code;
617f3897 1945
a1e179f5
AP
1946 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1947 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
51e020fc 1948
259ee451
RB
1949 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1950 when profitable. */
420863a9
KT
1951 if (TREE_CODE (arg2) == INTEGER_CST
1952 && CONVERT_EXPR_CODE_P (def1_code)
259ee451 1953 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
80d3dd38 1954 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
420863a9
KT
1955 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1956 {
1957 gimple newop;
83d5977e 1958 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
420863a9
KT
1959 newop =
1960 gimple_build_assign_with_ops (code, tem, def1_arg1,
1961 fold_convert_loc (gimple_location (stmt),
1962 TREE_TYPE (def1_arg1),
1963 arg2));
7f3ff782 1964 gimple_set_location (newop, gimple_location (stmt));
420863a9
KT
1965 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1966 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1967 tem, NULL_TREE, NULL_TREE);
1968 update_stmt (gsi_stmt (*gsi));
1969 return true;
1970 }
1971
10c224a9
RG
1972 /* For bitwise binary operations apply operand conversions to the
1973 binary operation result instead of to the operands. This allows
 1974 combining successive conversions and bitwise binary operations. */
51e020fc
RG
1975 if (CONVERT_EXPR_CODE_P (def1_code)
1976 && CONVERT_EXPR_CODE_P (def2_code)
1977 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
259ee451 1978 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
617f3897 1979 {
51e020fc 1980 gimple newop;
83d5977e 1981 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
51e020fc 1982 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
7f3ff782 1983 gimple_set_location (newop, gimple_location (stmt));
51e020fc
RG
1984 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1985 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1986 tem, NULL_TREE, NULL_TREE);
1987 update_stmt (gsi_stmt (*gsi));
1988 return true;
1989 }
1990
24fc7360
AP
1991
1992 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1993 if (def1_code == def2_code
1994 && def1_code == BIT_AND_EXPR
8eddb625
AP
1995 && operand_equal_for_phi_arg_p (def1_arg2,
1996 def2_arg2))
24fc7360 1997 {
8eddb625 1998 tree b = def1_arg2;
24fc7360
AP
1999 tree a = def1_arg1;
2000 tree c = def2_arg1;
2001 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2002 /* If A OP0 C (this usually means C is the same as A) is 0
2003 then fold it down correctly. */
2004 if (integer_zerop (inner))
2005 {
2006 gimple_assign_set_rhs_from_tree (gsi, inner);
2007 update_stmt (stmt);
2008 return true;
2009 }
2010 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
2011 then fold it down correctly. */
2012 else if (TREE_CODE (inner) == SSA_NAME)
2013 {
2014 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2015 inner, b);
2016 gimple_assign_set_rhs_from_tree (gsi, outer);
2017 update_stmt (stmt);
2018 return true;
2019 }
2020 else
2021 {
2022 gimple newop;
2023 tree tem;
83d5977e 2024 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
24fc7360 2025 newop = gimple_build_assign_with_ops (code, tem, a, c);
24fc7360
AP
2026 gimple_set_location (newop, gimple_location (stmt));
2027 /* Make sure to re-process the new stmt as it's walking upwards. */
2028 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2029 gimple_assign_set_rhs1 (stmt, tem);
2030 gimple_assign_set_rhs2 (stmt, b);
2031 gimple_assign_set_rhs_code (stmt, def1_code);
2032 update_stmt (stmt);
2033 return true;
2034 }
2035 }
2036
51e020fc
RG
2037 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2038 if (code == BIT_AND_EXPR
2039 && def1_code == BIT_IOR_EXPR
5d418483
MG
2040 && CONSTANT_CLASS_P (arg2)
2041 && CONSTANT_CLASS_P (def1_arg2))
51e020fc
RG
2042 {
2043 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
a1e179f5 2044 arg2, def1_arg2);
51e020fc
RG
2045 tree tem;
2046 gimple newop;
2047 if (integer_zerop (cst))
10c224a9 2048 {
51e020fc
RG
2049 gimple_assign_set_rhs1 (stmt, def1_arg1);
2050 update_stmt (stmt);
2051 return true;
10c224a9 2052 }
83d5977e 2053 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
51e020fc
RG
2054 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2055 tem, def1_arg1, arg2);
7f3ff782 2056 gimple_set_location (newop, gimple_location (stmt));
51e020fc
RG
2057 /* Make sure to re-process the new stmt as it's walking upwards. */
2058 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2059 gimple_assign_set_rhs1 (stmt, tem);
2060 gimple_assign_set_rhs2 (stmt, cst);
2061 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2062 update_stmt (stmt);
2063 return true;
2064 }
2065
2066 /* Combine successive equal operations with constants. */
2067 if ((code == BIT_AND_EXPR
2068 || code == BIT_IOR_EXPR
2069 || code == BIT_XOR_EXPR)
2070 && def1_code == code
5d418483
MG
2071 && CONSTANT_CLASS_P (arg2)
2072 && CONSTANT_CLASS_P (def1_arg2))
51e020fc
RG
2073 {
2074 tree cst = fold_build2 (code, TREE_TYPE (arg2),
a1e179f5 2075 arg2, def1_arg2);
51e020fc
RG
2076 gimple_assign_set_rhs1 (stmt, def1_arg1);
2077 gimple_assign_set_rhs2 (stmt, cst);
2078 update_stmt (stmt);
2079 return true;
617f3897 2080 }
10c224a9 2081
dca412a1
RG
2082 /* Canonicalize X ^ ~0 to ~X. */
2083 if (code == BIT_XOR_EXPR
dca412a1
RG
2084 && integer_all_onesp (arg2))
2085 {
2086 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2087 gcc_assert (gsi_stmt (*gsi) == stmt);
2088 update_stmt (stmt);
2089 return true;
2090 }
2091
0816a42a
KT
2092 /* Try simple folding for X op !X, and X op X. */
2093 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2094 if (res != NULL_TREE)
2095 {
2096 gimple_assign_set_rhs_from_tree (gsi, res);
2097 update_stmt (gsi_stmt (*gsi));
2098 return true;
2099 }
2100
a1e179f5
AP
2101 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2102 {
2103 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2104 if (def1_code == ocode)
2105 {
2106 tree x = arg2;
2107 enum tree_code coden;
2108 tree a1, a2;
2109 /* ( X | Y) & X -> X */
2110 /* ( X & Y) | X -> X */
2111 if (x == def1_arg1
2112 || x == def1_arg2)
2113 {
2114 gimple_assign_set_rhs_from_tree (gsi, x);
2115 update_stmt (gsi_stmt (*gsi));
2116 return true;
2117 }
2118
2119 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2120 /* (~X | Y) & X -> X & Y */
2121 /* (~X & Y) | X -> X | Y */
2122 if (coden == BIT_NOT_EXPR && a1 == x)
2123 {
2124 gimple_assign_set_rhs_with_ops (gsi, code,
2125 x, def1_arg2);
2126 gcc_assert (gsi_stmt (*gsi) == stmt);
2127 update_stmt (stmt);
2128 return true;
2129 }
2130 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2131 /* (Y | ~X) & X -> X & Y */
2132 /* (Y & ~X) | X -> X | Y */
2133 if (coden == BIT_NOT_EXPR && a1 == x)
2134 {
2135 gimple_assign_set_rhs_with_ops (gsi, code,
2136 x, def1_arg1);
2137 gcc_assert (gsi_stmt (*gsi) == stmt);
2138 update_stmt (stmt);
2139 return true;
2140 }
2141 }
2142 if (def2_code == ocode)
2143 {
2144 enum tree_code coden;
2145 tree a1;
2146 tree x = arg1;
2147 /* X & ( X | Y) -> X */
2148 /* X | ( X & Y) -> X */
2149 if (x == def2_arg1
2150 || x == def2_arg2)
2151 {
2152 gimple_assign_set_rhs_from_tree (gsi, x);
2153 update_stmt (gsi_stmt (*gsi));
2154 return true;
2155 }
2156 defcodefor_name (def2_arg1, &coden, &a1, NULL);
 2157 /* X & (~X | Y) -> X & Y */
 2158 /* X | (~X & Y) -> X | Y */
2159 if (coden == BIT_NOT_EXPR && a1 == x)
2160 {
2161 gimple_assign_set_rhs_with_ops (gsi, code,
2162 x, def2_arg2);
2163 gcc_assert (gsi_stmt (*gsi) == stmt);
2164 update_stmt (stmt);
2165 return true;
2166 }
2167 defcodefor_name (def2_arg2, &coden, &a1, NULL);
 2168 /* X & (Y | ~X) -> X & Y */
 2169 /* X | (Y & ~X) -> X | Y */
2170 if (coden == BIT_NOT_EXPR && a1 == x)
2171 {
2172 gimple_assign_set_rhs_with_ops (gsi, code,
2173 x, def2_arg1);
2174 gcc_assert (gsi_stmt (*gsi) == stmt);
2175 update_stmt (stmt);
2176 return true;
2177 }
2178 }
a1e179f5 2179
d53e2f99
JL
2180 /* If arg1 and arg2 are booleans (or any single bit type)
2181 then try to simplify:
2182
2183 (~X & Y) -> X < Y
2184 (X & ~Y) -> Y < X
2185 (~X | Y) -> X <= Y
2186 (X | ~Y) -> Y <= X
2187
2188 But only do this if our result feeds into a comparison as
2189 this transformation is not always a win, particularly on
2190 targets with and-not instructions. */
2191 if (TREE_CODE (arg1) == SSA_NAME
2192 && TREE_CODE (arg2) == SSA_NAME
2193 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2194 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2195 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2196 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2197 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2198 {
2199 use_operand_p use_p;
2200 gimple use_stmt;
2201
2202 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2203 {
2204 if (gimple_code (use_stmt) == GIMPLE_COND
2205 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2206 && integer_zerop (gimple_cond_rhs (use_stmt))
2207 && gimple_cond_code (use_stmt) == NE_EXPR)
2208 {
2209 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2210 return true;
2211 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2212 return true;
2213 }
2214 }
2215 }
2216 }
10c224a9 2217 return false;
617f3897
MJ
2218}
2219
2d698d3b 2220
cb3b8d33
JJ
2221/* Recognize rotation patterns. Return true if a transformation
2222 applied, otherwise return false.
2223
2224 We are looking for X with unsigned type T with bitsize B, OP being
2225 +, | or ^, some type T2 wider than T and
2226 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2227 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2228 (X << Y) OP (X >> (B - Y))
2229 (X << (int) Y) OP (X >> (int) (B - Y))
2230 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2231 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
ae6fa899
JJ
2232 (X << Y) | (X >> ((-Y) & (B - 1)))
2233 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2234 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2235 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
cb3b8d33
JJ
2236
2237 and transform these into:
2238 X r<< CNT1
2239 X r<< Y
2240
2241 Note, in the patterns with T2 type, the type of OP operands
 2242 might even be a signed type, but should have precision B. */
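
#if 0
/* Illustrative sketch, not part of the pass; "example_rotl" is a
   hypothetical name and a 32-bit unsigned int is assumed.  This is the
   (X << Y) | (X >> ((-Y) & (B - 1))) form above, which is safe even for a
   shift count of zero and is turned into a single X r<< Y.  */
static unsigned int
example_rotl (unsigned int x, unsigned int y)
{
  return (x << y) | (x >> ((-y) & 31));
}
#endif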
2243
2244static bool
2245simplify_rotate (gimple_stmt_iterator *gsi)
2246{
2247 gimple stmt = gsi_stmt (*gsi);
2248 tree arg[2], rtype, rotcnt = NULL_TREE;
2249 tree def_arg1[2], def_arg2[2];
2250 enum tree_code def_code[2];
2251 tree lhs;
2252 int i;
2253 bool swapped_p = false;
2254 gimple g;
2255
2256 arg[0] = gimple_assign_rhs1 (stmt);
2257 arg[1] = gimple_assign_rhs2 (stmt);
2258 rtype = TREE_TYPE (arg[0]);
2259
2260 /* Only create rotates in complete modes. Other cases are not
2261 expanded properly. */
2262 if (!INTEGRAL_TYPE_P (rtype)
2263 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2264 return false;
2265
2266 for (i = 0; i < 2; i++)
2267 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2268
2269 /* Look through narrowing conversions. */
2270 if (CONVERT_EXPR_CODE_P (def_code[0])
2271 && CONVERT_EXPR_CODE_P (def_code[1])
2272 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2273 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2274 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2275 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2276 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2277 && has_single_use (arg[0])
2278 && has_single_use (arg[1]))
2279 {
2280 for (i = 0; i < 2; i++)
2281 {
2282 arg[i] = def_arg1[i];
2283 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2284 }
2285 }
2286
2287 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2288 for (i = 0; i < 2; i++)
2289 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2290 return false;
2291 else if (!has_single_use (arg[i]))
2292 return false;
2293 if (def_code[0] == def_code[1])
2294 return false;
2295
2296 /* If we've looked through narrowing conversions before, look through
2297 widening conversions from unsigned type with the same precision
2298 as rtype here. */
2299 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2300 for (i = 0; i < 2; i++)
2301 {
2302 tree tem;
2303 enum tree_code code;
2304 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2305 if (!CONVERT_EXPR_CODE_P (code)
2306 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2307 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2308 return false;
2309 def_arg1[i] = tem;
2310 }
2311 /* Both shifts have to use the same first operand. */
2312 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2313 return false;
2314 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2315 return false;
2316
2317 /* CNT1 + CNT2 == B case above. */
2318 if (host_integerp (def_arg2[0], 1)
2319 && host_integerp (def_arg2[1], 1)
2320 && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
2321 + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
2322 rotcnt = def_arg2[0];
2323 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2324 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2325 return false;
2326 else
2327 {
2328 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2329 enum tree_code cdef_code[2];
2330 /* Look through conversion of the shift count argument.
 2331 The C/C++ FE casts any shift count argument to integer_type_node.
 2332 The only problem might be if the shift count type maximum value
 2333 is equal to or smaller than the number of bits in rtype. */
2334 for (i = 0; i < 2; i++)
2335 {
2336 def_arg2_alt[i] = def_arg2[i];
2337 defcodefor_name (def_arg2[i], &cdef_code[i],
2338 &cdef_arg1[i], &cdef_arg2[i]);
2339 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2340 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2341 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2342 > floor_log2 (TYPE_PRECISION (rtype))
2343 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2344 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2345 {
2346 def_arg2_alt[i] = cdef_arg1[i];
2347 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2348 &cdef_arg1[i], &cdef_arg2[i]);
2349 }
2350 }
2351 for (i = 0; i < 2; i++)
2352 /* Check for one shift count being Y and the other B - Y,
2353 with optional casts. */
2354 if (cdef_code[i] == MINUS_EXPR
2355 && host_integerp (cdef_arg1[i], 0)
2356 && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
2357 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2358 {
2359 tree tem;
2360 enum tree_code code;
2361
2362 if (cdef_arg2[i] == def_arg2[1 - i]
2363 || cdef_arg2[i] == def_arg2_alt[1 - i])
2364 {
2365 rotcnt = cdef_arg2[i];
2366 break;
2367 }
2368 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2369 if (CONVERT_EXPR_CODE_P (code)
2370 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2371 && TYPE_PRECISION (TREE_TYPE (tem))
2372 > floor_log2 (TYPE_PRECISION (rtype))
2373 && TYPE_PRECISION (TREE_TYPE (tem))
2374 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2375 && (tem == def_arg2[1 - i]
2376 || tem == def_arg2_alt[1 - i]))
2377 {
2378 rotcnt = tem;
2379 break;
2380 }
2381 }
2382 /* The above sequence isn't safe for Y being 0,
2383 because then one of the shifts triggers undefined behavior.
2384 This alternative is safe even for rotation count of 0.
2385 One shift count is Y and the other (-Y) & (B - 1). */
2386 else if (cdef_code[i] == BIT_AND_EXPR
2387 && host_integerp (cdef_arg2[i], 0)
2388 && tree_low_cst (cdef_arg2[i], 0)
2389 == TYPE_PRECISION (rtype) - 1
ae6fa899
JJ
2390 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2391 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
cb3b8d33
JJ
2392 {
2393 tree tem;
2394 enum tree_code code;
2395
2396 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2397 if (CONVERT_EXPR_CODE_P (code)
2398 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2399 && TYPE_PRECISION (TREE_TYPE (tem))
2400 > floor_log2 (TYPE_PRECISION (rtype))
2401 && TYPE_PRECISION (TREE_TYPE (tem))
2402 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2403 defcodefor_name (tem, &code, &tem, NULL);
2404
2405 if (code == NEGATE_EXPR)
2406 {
2407 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2408 {
2409 rotcnt = tem;
2410 break;
2411 }
2412 defcodefor_name (tem, &code, &tem, NULL);
2413 if (CONVERT_EXPR_CODE_P (code)
2414 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2415 && TYPE_PRECISION (TREE_TYPE (tem))
2416 > floor_log2 (TYPE_PRECISION (rtype))
2417 && TYPE_PRECISION (TREE_TYPE (tem))
2418 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2419 && (tem == def_arg2[1 - i]
2420 || tem == def_arg2_alt[1 - i]))
2421 {
2422 rotcnt = tem;
2423 break;
2424 }
2425 }
2426 }
2427 if (rotcnt == NULL_TREE)
2428 return false;
2429 swapped_p = i != 1;
2430 }
2431
2432 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2433 TREE_TYPE (rotcnt)))
2434 {
2435 g = gimple_build_assign_with_ops (NOP_EXPR,
2436 make_ssa_name (TREE_TYPE (def_arg2[0]),
2437 NULL),
2438 rotcnt, NULL_TREE);
2439 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2440 rotcnt = gimple_assign_lhs (g);
2441 }
2442 lhs = gimple_assign_lhs (stmt);
2443 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2444 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2445 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2446 ? LROTATE_EXPR : RROTATE_EXPR,
2447 lhs, def_arg1[0], rotcnt);
2448 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2449 {
2450 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2451 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2452 lhs, NULL_TREE);
2453 }
2454 gsi_replace (gsi, g, false);
2455 return true;
2456}
2457
2d698d3b 2458/* Perform re-associations of the plus or minus statement STMT that are
0fdb0d27 2459 always permitted. Returns true if the CFG was changed. */
2d698d3b 2460
0fdb0d27 2461static bool
59401b92 2462associate_plusminus (gimple_stmt_iterator *gsi)
2d698d3b 2463{
59401b92 2464 gimple stmt = gsi_stmt (*gsi);
2d698d3b
RG
2465 tree rhs1 = gimple_assign_rhs1 (stmt);
2466 tree rhs2 = gimple_assign_rhs2 (stmt);
2467 enum tree_code code = gimple_assign_rhs_code (stmt);
2d698d3b
RG
2468 bool changed;
2469
2470 /* We can't reassociate at all for saturating types. */
2471 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
0fdb0d27 2472 return false;
2d698d3b
RG
2473
2474 /* First contract negates. */
2475 do
2476 {
2477 changed = false;
2478
2479 /* A +- (-B) -> A -+ B. */
2480 if (TREE_CODE (rhs2) == SSA_NAME)
2481 {
2482 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2483 if (is_gimple_assign (def_stmt)
931050d0
EB
2484 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2485 && can_propagate_from (def_stmt))
2d698d3b
RG
2486 {
2487 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2488 gimple_assign_set_rhs_code (stmt, code);
2489 rhs2 = gimple_assign_rhs1 (def_stmt);
2490 gimple_assign_set_rhs2 (stmt, rhs2);
2491 gimple_set_modified (stmt, true);
2492 changed = true;
2493 }
2494 }
2495
2496 /* (-A) + B -> B - A. */
2497 if (TREE_CODE (rhs1) == SSA_NAME
2498 && code == PLUS_EXPR)
2499 {
2500 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2501 if (is_gimple_assign (def_stmt)
931050d0
EB
2502 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2503 && can_propagate_from (def_stmt))
2d698d3b
RG
2504 {
2505 code = MINUS_EXPR;
2506 gimple_assign_set_rhs_code (stmt, code);
2507 rhs1 = rhs2;
2508 gimple_assign_set_rhs1 (stmt, rhs1);
2509 rhs2 = gimple_assign_rhs1 (def_stmt);
2510 gimple_assign_set_rhs2 (stmt, rhs2);
2511 gimple_set_modified (stmt, true);
2512 changed = true;
2513 }
2514 }
2515 }
2516 while (changed);
2517
2518 /* We can't reassociate floating-point or fixed-point plus or minus
2519 because of saturation to +-Inf. */
2520 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2521 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2522 goto out;
2523
2524 /* Second match patterns that allow contracting a plus-minus pair
2525 irrespective of overflow issues.
2526
2527 (A +- B) - A -> +- B
2528 (A +- B) -+ B -> A
2529 (CST +- A) +- CST -> CST +- A
5d418483 2530 (A +- CST) +- CST -> A +- CST
2d698d3b
RG
2531 ~A + A -> -1
2532 ~A + 1 -> -A
2533 A - (A +- B) -> -+ B
2534 A +- (B +- A) -> +- B
2535 CST +- (CST +- A) -> CST +- A
2536 CST +- (A +- CST) -> CST +- A
2537 A + ~A -> -1
2538
2539 via commutating the addition and contracting operations to zero
2540 by reassociation. */
2541
2d698d3b
RG
2542 if (TREE_CODE (rhs1) == SSA_NAME)
2543 {
2544 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
931050d0 2545 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2d698d3b
RG
2546 {
2547 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2548 if (def_code == PLUS_EXPR
2549 || def_code == MINUS_EXPR)
2550 {
2551 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2552 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2553 if (operand_equal_p (def_rhs1, rhs2, 0)
2554 && code == MINUS_EXPR)
2555 {
2556 /* (A +- B) - A -> +- B. */
2557 code = ((def_code == PLUS_EXPR)
2558 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2559 rhs1 = def_rhs2;
2560 rhs2 = NULL_TREE;
59401b92
RG
2561 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2562 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2563 gimple_set_modified (stmt, true);
2564 }
2565 else if (operand_equal_p (def_rhs2, rhs2, 0)
2566 && code != def_code)
2567 {
2568 /* (A +- B) -+ B -> A. */
2569 code = TREE_CODE (def_rhs1);
2570 rhs1 = def_rhs1;
2571 rhs2 = NULL_TREE;
59401b92
RG
2572 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2573 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2574 gimple_set_modified (stmt, true);
2575 }
5d418483
MG
2576 else if (CONSTANT_CLASS_P (rhs2)
2577 && CONSTANT_CLASS_P (def_rhs1))
2d698d3b
RG
2578 {
2579 /* (CST +- A) +- CST -> CST +- A. */
2580 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2581 def_rhs1, rhs2);
2582 if (cst && !TREE_OVERFLOW (cst))
2583 {
2584 code = def_code;
2585 gimple_assign_set_rhs_code (stmt, code);
2586 rhs1 = cst;
2587 gimple_assign_set_rhs1 (stmt, rhs1);
2588 rhs2 = def_rhs2;
2589 gimple_assign_set_rhs2 (stmt, rhs2);
2590 gimple_set_modified (stmt, true);
2591 }
2592 }
5d418483
MG
2593 else if (CONSTANT_CLASS_P (rhs2)
2594 && CONSTANT_CLASS_P (def_rhs2))
2d698d3b 2595 {
5d418483
MG
2596 /* (A +- CST) +- CST -> A +- CST. */
2597 enum tree_code mix = (code == def_code)
2598 ? PLUS_EXPR : MINUS_EXPR;
2599 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
2d698d3b
RG
2600 def_rhs2, rhs2);
2601 if (cst && !TREE_OVERFLOW (cst))
2602 {
5d418483 2603 code = def_code;
2d698d3b
RG
2604 gimple_assign_set_rhs_code (stmt, code);
2605 rhs1 = def_rhs1;
2606 gimple_assign_set_rhs1 (stmt, rhs1);
2607 rhs2 = cst;
2608 gimple_assign_set_rhs2 (stmt, rhs2);
2609 gimple_set_modified (stmt, true);
2610 }
2611 }
2612 }
5d418483 2613 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
2d698d3b
RG
2614 {
2615 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
5d418483 2616 if (operand_equal_p (def_rhs1, rhs2, 0))
2d698d3b
RG
2617 {
2618 /* ~A + A -> -1. */
5d418483 2619 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
2d698d3b 2620 rhs2 = NULL_TREE;
5d418483 2621 code = TREE_CODE (rhs1);
59401b92
RG
2622 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2623 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2624 gimple_set_modified (stmt, true);
2625 }
5d418483
MG
2626 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2627 && integer_onep (rhs2))
2628 || (TREE_CODE (rhs2) == COMPLEX_CST
2629 && integer_onep (TREE_REALPART (rhs2))
2630 && integer_onep (TREE_IMAGPART (rhs2))))
2d698d3b
RG
2631 {
2632 /* ~A + 1 -> -A. */
2633 code = NEGATE_EXPR;
2634 rhs1 = def_rhs1;
2635 rhs2 = NULL_TREE;
59401b92
RG
2636 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2637 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2638 gimple_set_modified (stmt, true);
2639 }
2640 }
2641 }
2642 }
2643
2644 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2645 {
2646 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
931050d0 2647 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2d698d3b
RG
2648 {
2649 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2650 if (def_code == PLUS_EXPR
2651 || def_code == MINUS_EXPR)
2652 {
2653 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2654 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2655 if (operand_equal_p (def_rhs1, rhs1, 0)
2656 && code == MINUS_EXPR)
2657 {
2658 /* A - (A +- B) -> -+ B. */
2659 code = ((def_code == PLUS_EXPR)
2660 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2661 rhs1 = def_rhs2;
2662 rhs2 = NULL_TREE;
59401b92
RG
2663 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2664 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2665 gimple_set_modified (stmt, true);
2666 }
2667 else if (operand_equal_p (def_rhs2, rhs1, 0)
2668 && code != def_code)
2669 {
2670 /* A +- (B +- A) -> +- B. */
2671 code = ((code == PLUS_EXPR)
2672 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2673 rhs1 = def_rhs1;
2674 rhs2 = NULL_TREE;
59401b92
RG
2675 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2676 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2677 gimple_set_modified (stmt, true);
2678 }
5d418483
MG
2679 else if (CONSTANT_CLASS_P (rhs1)
2680 && CONSTANT_CLASS_P (def_rhs1))
2d698d3b
RG
2681 {
2682 /* CST +- (CST +- A) -> CST +- A. */
2683 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2684 rhs1, def_rhs1);
2685 if (cst && !TREE_OVERFLOW (cst))
2686 {
2687 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2688 gimple_assign_set_rhs_code (stmt, code);
2689 rhs1 = cst;
2690 gimple_assign_set_rhs1 (stmt, rhs1);
2691 rhs2 = def_rhs2;
2692 gimple_assign_set_rhs2 (stmt, rhs2);
2693 gimple_set_modified (stmt, true);
2694 }
2695 }
5d418483
MG
2696 else if (CONSTANT_CLASS_P (rhs1)
2697 && CONSTANT_CLASS_P (def_rhs2))
2d698d3b
RG
2698 {
2699 /* CST +- (A +- CST) -> CST +- A. */
2700 tree cst = fold_binary (def_code == code
2701 ? PLUS_EXPR : MINUS_EXPR,
2702 TREE_TYPE (rhs2),
2703 rhs1, def_rhs2);
2704 if (cst && !TREE_OVERFLOW (cst))
2705 {
2706 rhs1 = cst;
2707 gimple_assign_set_rhs1 (stmt, rhs1);
2708 rhs2 = def_rhs1;
2709 gimple_assign_set_rhs2 (stmt, rhs2);
2710 gimple_set_modified (stmt, true);
2711 }
2712 }
2713 }
5d418483 2714 else if (def_code == BIT_NOT_EXPR)
2d698d3b
RG
2715 {
2716 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2717 if (code == PLUS_EXPR
2718 && operand_equal_p (def_rhs1, rhs1, 0))
2719 {
2720 /* A + ~A -> -1. */
5d418483 2721 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
2d698d3b 2722 rhs2 = NULL_TREE;
5d418483 2723 code = TREE_CODE (rhs1);
59401b92
RG
2724 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2725 gcc_assert (gsi_stmt (*gsi) == stmt);
2d698d3b
RG
2726 gimple_set_modified (stmt, true);
2727 }
2728 }
2729 }
2730 }
2731
2732out:
2733 if (gimple_modified_p (stmt))
2734 {
59401b92 2735 fold_stmt_inplace (gsi);
2d698d3b 2736 update_stmt (stmt);
0fdb0d27
RG
2737 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2738 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2739 return true;
2d698d3b 2740 }
0fdb0d27
RG
2741
2742 return false;
2d698d3b
RG
2743}
2744
a8ab21e5
RG
 2745/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2746 true if anything changed, false otherwise. */
2747
2748static bool
2749associate_pointerplus (gimple_stmt_iterator *gsi)
2750{
2751 gimple stmt = gsi_stmt (*gsi);
2752 gimple def_stmt;
2753 tree ptr, rhs, algn;
2754
2755 /* Pattern match
2756 tem = (sizetype) ptr;
2757 tem = tem & algn;
2758 tem = -tem;
2759 ... = ptr p+ tem;
 2760 and produce the simpler form, easier to analyze with respect to alignment,
2761 ... = ptr & ~algn; */
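 /* For example (hypothetical names), with algn == 15:
      tem_1 = (sizetype) ptr_2;
      tem_3 = tem_1 & 15;
      tem_4 = -tem_3;
      res_5 = ptr_2 p+ tem_4;
    becomes res_5 = ptr_2 & ~15, i.e. ptr_2 rounded down to a 16-byte
    boundary.  */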
2762 ptr = gimple_assign_rhs1 (stmt);
2763 rhs = gimple_assign_rhs2 (stmt);
2764 if (TREE_CODE (rhs) != SSA_NAME)
2765 return false;
2766 def_stmt = SSA_NAME_DEF_STMT (rhs);
2767 if (!is_gimple_assign (def_stmt)
2768 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2769 return false;
2770 rhs = gimple_assign_rhs1 (def_stmt);
2771 if (TREE_CODE (rhs) != SSA_NAME)
2772 return false;
2773 def_stmt = SSA_NAME_DEF_STMT (rhs);
2774 if (!is_gimple_assign (def_stmt)
2775 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2776 return false;
2777 rhs = gimple_assign_rhs1 (def_stmt);
2778 algn = gimple_assign_rhs2 (def_stmt);
2779 if (TREE_CODE (rhs) != SSA_NAME
2780 || TREE_CODE (algn) != INTEGER_CST)
2781 return false;
2782 def_stmt = SSA_NAME_DEF_STMT (rhs);
2783 if (!is_gimple_assign (def_stmt)
2784 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2785 return false;
2786 if (gimple_assign_rhs1 (def_stmt) != ptr)
2787 return false;
2788
27bcd47c 2789 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
a8ab21e5
RG
2790 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2791 fold_stmt_inplace (gsi);
2792 update_stmt (stmt);
2793
2794 return true;
2795}
2796
be173289 2797/* Combine two conversions in a row for the second conversion at *GSI.
91bc6112
RG
2798 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2799 run. Else it returns 0. */
be173289 2800
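#if 0
/* Illustrative sketch, not part of the pass; "example_widen" is a
   hypothetical name.  The intermediate widening to int is useless here:
   both conversions are integral, keep the signedness, and int is wider
   than short, so the pass converts the short operand to long directly.  */
static long
example_widen (short s)
{
  int i = s;
  return (long) i;
}
#endif
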
91bc6112 2801static int
be173289
RG
2802combine_conversions (gimple_stmt_iterator *gsi)
2803{
2804 gimple stmt = gsi_stmt (*gsi);
2805 gimple def_stmt;
2806 tree op0, lhs;
2807 enum tree_code code = gimple_assign_rhs_code (stmt);
07ab2b1b 2808 enum tree_code code2;
be173289
RG
2809
2810 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2811 || code == FLOAT_EXPR
2812 || code == FIX_TRUNC_EXPR);
2813
2814 lhs = gimple_assign_lhs (stmt);
2815 op0 = gimple_assign_rhs1 (stmt);
2816 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2817 {
2818 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
91bc6112 2819 return 1;
be173289
RG
2820 }
2821
2822 if (TREE_CODE (op0) != SSA_NAME)
91bc6112 2823 return 0;
be173289
RG
2824
2825 def_stmt = SSA_NAME_DEF_STMT (op0);
2826 if (!is_gimple_assign (def_stmt))
91bc6112 2827 return 0;
be173289 2828
07ab2b1b
MG
2829 code2 = gimple_assign_rhs_code (def_stmt);
2830
2831 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
be173289
RG
2832 {
2833 tree defop0 = gimple_assign_rhs1 (def_stmt);
2834 tree type = TREE_TYPE (lhs);
2835 tree inside_type = TREE_TYPE (defop0);
2836 tree inter_type = TREE_TYPE (op0);
2837 int inside_int = INTEGRAL_TYPE_P (inside_type);
2838 int inside_ptr = POINTER_TYPE_P (inside_type);
2839 int inside_float = FLOAT_TYPE_P (inside_type);
2840 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2841 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2842 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2843 int inter_int = INTEGRAL_TYPE_P (inter_type);
2844 int inter_ptr = POINTER_TYPE_P (inter_type);
2845 int inter_float = FLOAT_TYPE_P (inter_type);
2846 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2847 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2848 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2849 int final_int = INTEGRAL_TYPE_P (type);
2850 int final_ptr = POINTER_TYPE_P (type);
2851 int final_float = FLOAT_TYPE_P (type);
2852 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2853 unsigned int final_prec = TYPE_PRECISION (type);
2854 int final_unsignedp = TYPE_UNSIGNED (type);
2855
9402220c
EB
2856 /* Don't propagate ssa names that occur in abnormal phis. */
2857 if (TREE_CODE (defop0) == SSA_NAME
2858 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2859 return 0;
2860
be173289
RG
2861 /* In addition to the cases of two conversions in a row
2862 handled below, if we are converting something to its own
2863 type via an object of identical or wider precision, neither
2864 conversion is needed. */
2865 if (useless_type_conversion_p (type, inside_type)
2866 && (((inter_int || inter_ptr) && final_int)
2867 || (inter_float && final_float))
2868 && inter_prec >= final_prec)
2869 {
2870 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2871 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2872 update_stmt (stmt);
91bc6112 2873 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2874 }
2875
2876 /* Likewise, if the intermediate and initial types are either both
2877 float or both integer, we don't need the middle conversion if the
2878 former is wider than the latter and doesn't change the signedness
2879 (for integers). Avoid this if the final type is a pointer since
2880 then we sometimes need the middle conversion. Likewise if the
2881 final type has a precision not equal to the size of its mode. */
2882 if (((inter_int && inside_int)
2883 || (inter_float && inside_float)
2884 || (inter_vec && inside_vec))
2885 && inter_prec >= inside_prec
2886 && (inter_float || inter_vec
2887 || inter_unsignedp == inside_unsignedp)
d3ea1dbd 2888 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
be173289
RG
2889 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2890 && ! final_ptr
2891 && (! final_vec || inter_prec == inside_prec))
2892 {
2893 gimple_assign_set_rhs1 (stmt, defop0);
2894 update_stmt (stmt);
91bc6112 2895 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2896 }
2897
2898 /* If we have a sign-extension of a zero-extended value, we can
1caf8dd6
RG
2899 replace that by a single zero-extension. Likewise if the
2900 final conversion does not change precision we can drop the
2901 intermediate conversion. */
be173289 2902 if (inside_int && inter_int && final_int
1caf8dd6
RG
2903 && ((inside_prec < inter_prec && inter_prec < final_prec
2904 && inside_unsignedp && !inter_unsignedp)
2905 || final_prec == inter_prec))
be173289
RG
2906 {
2907 gimple_assign_set_rhs1 (stmt, defop0);
2908 update_stmt (stmt);
91bc6112 2909 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2910 }
2911
2912 /* Two conversions in a row are not needed unless:
2913 - some conversion is floating-point (overstrict for now), or
2914 - some conversion is a vector (overstrict for now), or
2915 - the intermediate type is narrower than both initial and
2916 final, or
2917 - the intermediate type and innermost type differ in signedness,
2918 and the outermost type is wider than the intermediate, or
2919 - the initial type is a pointer type and the precisions of the
2920 intermediate and final types differ, or
2921 - the final type is a pointer type and the precisions of the
2922 initial and intermediate types differ. */
2923 if (! inside_float && ! inter_float && ! final_float
2924 && ! inside_vec && ! inter_vec && ! final_vec
2925 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2926 && ! (inside_int && inter_int
2927 && inter_unsignedp != inside_unsignedp
2928 && inter_prec < final_prec)
2929 && ((inter_unsignedp && inter_prec > inside_prec)
2930 == (final_unsignedp && final_prec > inter_prec))
2931 && ! (inside_ptr && inter_prec != final_prec)
2932 && ! (final_ptr && inside_prec != inter_prec)
d3ea1dbd 2933 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
be173289
RG
2934 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2935 {
2936 gimple_assign_set_rhs1 (stmt, defop0);
2937 update_stmt (stmt);
91bc6112 2938 return remove_prop_source_from_use (op0) ? 2 : 1;
be173289
RG
2939 }
2940
2941 /* A truncation to an unsigned type should be canonicalized as
2942 bitwise and of a mask. */
2943 if (final_int && inter_int && inside_int
2944 && final_prec == inside_prec
2945 && final_prec > inter_prec
2946 && inter_unsignedp)
2947 {
2948 tree tem;
2949 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2950 defop0,
2951 double_int_to_tree
27bcd47c 2952 (inside_type, double_int::mask (inter_prec)));
be173289
RG
2953 if (!useless_type_conversion_p (type, inside_type))
2954 {
2955 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2956 GSI_SAME_STMT);
2957 gimple_assign_set_rhs1 (stmt, tem);
2958 }
2959 else
2960 gimple_assign_set_rhs_from_tree (gsi, tem);
2961 update_stmt (gsi_stmt (*gsi));
91bc6112 2962 return 1;
be173289 2963 }
07ab2b1b
MG
2964
2965 /* If we are converting an integer to a floating-point that can
2966 represent it exactly and back to an integer, we can skip the
2967 floating-point conversion. */
2968 if (inside_int && inter_float && final_int &&
2969 (unsigned) significand_size (TYPE_MODE (inter_type))
2970 >= inside_prec - !inside_unsignedp)
2971 {
2972 if (useless_type_conversion_p (type, inside_type))
2973 {
2974 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2975 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2976 update_stmt (stmt);
2977 return remove_prop_source_from_use (op0) ? 2 : 1;
2978 }
2979 else
2980 {
2981 gimple_assign_set_rhs1 (stmt, defop0);
2982 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2983 update_stmt (stmt);
2984 return remove_prop_source_from_use (op0) ? 2 : 1;
2985 }
2986 }
be173289
RG
2987 }
2988
91bc6112 2989 return 0;
be173289
RG
2990}
2991
881a9dcd
MG
2992/* Combine an element access with a shuffle. Returns true if there were
2993 any changes made, else it returns false. */
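/* For instance (a hypothetical GIMPLE sketch with 4-element vectors of
   32-bit ints):
     v_1 = VEC_PERM_EXPR <a_2, b_3, { 5, 1, 2, 3 }>;
     x_4 = BIT_FIELD_REF <v_1, 32, 0>;
   Lane 0 of the permutation comes from lane 1 of b_3, so the extraction is
   rewritten to read b_3 directly: x_4 = BIT_FIELD_REF <b_3, 32, 32>.  */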
2994
2995static bool
2996simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2997{
2998 gimple stmt = gsi_stmt (*gsi);
2999 gimple def_stmt;
3000 tree op, op0, op1, op2;
3001 tree elem_type;
3002 unsigned idx, n, size;
3003 enum tree_code code;
3004
3005 op = gimple_assign_rhs1 (stmt);
3006 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3007
3008 op0 = TREE_OPERAND (op, 0);
3009 if (TREE_CODE (op0) != SSA_NAME
3010 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3011 return false;
3012
f2167d68
MG
3013 def_stmt = get_prop_source_stmt (op0, false, NULL);
3014 if (!def_stmt || !can_propagate_from (def_stmt))
3015 return false;
3016
3017 op1 = TREE_OPERAND (op, 1);
3018 op2 = TREE_OPERAND (op, 2);
3019 code = gimple_assign_rhs_code (def_stmt);
3020
3021 if (code == CONSTRUCTOR)
3022 {
3023 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3024 gimple_assign_rhs1 (def_stmt), op1, op2);
3025 if (!tem || !valid_gimple_rhs_p (tem))
3026 return false;
3027 gimple_assign_set_rhs_from_tree (gsi, tem);
3028 update_stmt (gsi_stmt (*gsi));
3029 return true;
3030 }
3031
881a9dcd
MG
3032 elem_type = TREE_TYPE (TREE_TYPE (op0));
3033 if (TREE_TYPE (op) != elem_type)
3034 return false;
3035
3036 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
881a9dcd
MG
3037 n = TREE_INT_CST_LOW (op1) / size;
3038 if (n != 1)
3039 return false;
881a9dcd
MG
3040 idx = TREE_INT_CST_LOW (op2) / size;
3041
881a9dcd
MG
3042 if (code == VEC_PERM_EXPR)
3043 {
3044 tree p, m, index, tem;
3045 unsigned nelts;
3046 m = gimple_assign_rhs3 (def_stmt);
3047 if (TREE_CODE (m) != VECTOR_CST)
3048 return false;
3049 nelts = VECTOR_CST_NELTS (m);
3050 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3051 idx %= 2 * nelts;
3052 if (idx < nelts)
3053 {
3054 p = gimple_assign_rhs1 (def_stmt);
3055 }
3056 else
3057 {
3058 p = gimple_assign_rhs2 (def_stmt);
3059 idx -= nelts;
3060 }
3061 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3062 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3ebd25e1 3063 unshare_expr (p), op1, index);
881a9dcd
MG
3064 gimple_assign_set_rhs1 (stmt, tem);
3065 fold_stmt (gsi);
3066 update_stmt (gsi_stmt (*gsi));
3067 return true;
3068 }
3069
3070 return false;
3071}
3072
8a3ffc5d
MG
3073/* Determine whether applying the 2 permutations (mask1 then mask2)
 3074 gives back one of the inputs. */
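/* For instance, with two-element masks, applying { 1, 0 } and then
   { 1, 0 } swaps the lanes twice, so the combined permutation is the
   identity on the first input and the function returns 1.  */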
3075
3076static int
3077is_combined_permutation_identity (tree mask1, tree mask2)
3078{
3079 tree mask;
3080 unsigned int nelts, i, j;
3081 bool maybe_identity1 = true;
3082 bool maybe_identity2 = true;
3083
3084 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3085 && TREE_CODE (mask2) == VECTOR_CST);
3086 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3087 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3088
3089 nelts = VECTOR_CST_NELTS (mask);
3090 for (i = 0; i < nelts; i++)
3091 {
3092 tree val = VECTOR_CST_ELT (mask, i);
3093 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3094 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3095 if (j == i)
3096 maybe_identity2 = false;
3097 else if (j == i + nelts)
3098 maybe_identity1 = false;
3099 else
3100 return 0;
3101 }
3102 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3103}
3104
84c3c7ce
MG
3105/* Combine a shuffle with its arguments. Returns 1 if there were any
3106 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
8a3ffc5d
MG
3107
3108static int
3109simplify_permutation (gimple_stmt_iterator *gsi)
3110{
3111 gimple stmt = gsi_stmt (*gsi);
3112 gimple def_stmt;
84c3c7ce
MG
3113 tree op0, op1, op2, op3, arg0, arg1;
3114 enum tree_code code;
3ebd25e1 3115 bool single_use_op0 = false;
8a3ffc5d 3116
84c3c7ce 3117 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
8a3ffc5d
MG
3118
3119 op0 = gimple_assign_rhs1 (stmt);
3120 op1 = gimple_assign_rhs2 (stmt);
3121 op2 = gimple_assign_rhs3 (stmt);
3122
8a3ffc5d
MG
3123 if (TREE_CODE (op2) != VECTOR_CST)
3124 return 0;
3125
84c3c7ce
MG
3126 if (TREE_CODE (op0) == VECTOR_CST)
3127 {
3128 code = VECTOR_CST;
3129 arg0 = op0;
3130 }
3131 else if (TREE_CODE (op0) == SSA_NAME)
3132 {
3ebd25e1
MG
3133 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3134 if (!def_stmt || !can_propagate_from (def_stmt))
84c3c7ce 3135 return 0;
8a3ffc5d 3136
84c3c7ce
MG
3137 code = gimple_assign_rhs_code (def_stmt);
3138 arg0 = gimple_assign_rhs1 (def_stmt);
3139 }
3140 else
8a3ffc5d
MG
3141 return 0;
3142
8a3ffc5d 3143 /* Two consecutive shuffles. */
84c3c7ce 3144 if (code == VEC_PERM_EXPR)
8a3ffc5d
MG
3145 {
3146 tree orig;
3147 int ident;
84c3c7ce
MG
3148
3149 if (op0 != op1)
3150 return 0;
8a3ffc5d
MG
3151 op3 = gimple_assign_rhs3 (def_stmt);
3152 if (TREE_CODE (op3) != VECTOR_CST)
3153 return 0;
3154 ident = is_combined_permutation_identity (op3, op2);
3155 if (!ident)
3156 return 0;
3157 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3158 : gimple_assign_rhs2 (def_stmt);
3159 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3160 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3161 gimple_set_num_ops (stmt, 2);
3162 update_stmt (stmt);
3163 return remove_prop_source_from_use (op0) ? 2 : 1;
3164 }
3165
84c3c7ce
MG
3166 /* Shuffle of a constructor. */
3167 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3168 {
3169 tree opt;
3170 bool ret = false;
3171 if (op0 != op1)
3172 {
3ebd25e1 3173 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
84c3c7ce
MG
3174 return 0;
3175
3176 if (TREE_CODE (op1) == VECTOR_CST)
3177 arg1 = op1;
3178 else if (TREE_CODE (op1) == SSA_NAME)
3179 {
3180 enum tree_code code2;
3181
3ebd25e1
MG
3182 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3183 if (!def_stmt2 || !can_propagate_from (def_stmt2))
84c3c7ce
MG
3184 return 0;
3185
3186 code2 = gimple_assign_rhs_code (def_stmt2);
3187 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3188 return 0;
3189 arg1 = gimple_assign_rhs1 (def_stmt2);
3190 }
3191 else
3192 return 0;
3193 }
3194 else
3195 {
3196 /* Already used twice in this statement. */
3197 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3198 return 0;
3199 arg1 = arg0;
3200 }
c3284718 3201 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
84c3c7ce 3202 if (!opt
c3284718 3203 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
84c3c7ce
MG
3204 return 0;
3205 gimple_assign_set_rhs_from_tree (gsi, opt);
3206 update_stmt (gsi_stmt (*gsi));
3207 if (TREE_CODE (op0) == SSA_NAME)
3208 ret = remove_prop_source_from_use (op0);
3209 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3210 ret |= remove_prop_source_from_use (op1);
3211 return ret ? 2 : 1;
3212 }
3213
3214 return 0;
8a3ffc5d
MG
3215}

/* Recognize a CONSTRUCTOR of BIT_FIELD_REFs from a single source vector
   that can be expressed as a VEC_PERM_EXPR (or a plain copy when the
   selection is the identity).  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
        maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
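
/* An illustrative sketch (not part of the original sources; the function and
   variable names below are made up for the example) of the kind of input
   simplify_vector_constructor handles, written with GCC's generic vector
   extension:

     typedef int v4si __attribute__ ((vector_size (16)));

     v4si
     rotate_elements (v4si a)
     {
       return (v4si) { a[1], a[2], a[3], a[0] };
     }

   After gimplification the constructor elements are BIT_FIELD_REFs of the
   single source vector a, selecting elements 1, 2, 3 and 0, so the
   constructor is replaced by

     _5 = VEC_PERM_EXPR <a_1, a_1, { 1, 2, 3, 0 }>;

   provided the target supports that permutation (can_vec_perm_p).  Had the
   selected elements been 0, 1, 2, 3, maybe_ident would remain true and the
   constructor would simply be replaced by a copy of a_1.  */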

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs, true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off))), true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              if (forward_propagate_comparison (&gsi))
                cfg_changed = true;
            }
          else
            gsi_next (&gsi);
        }
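      /* An illustrative sketch (not from the original sources) of the
         address propagation done in the loop above:

           ptr_1 = &a;
           x_2 = MEM[(int *)ptr_1 + 4B];

         forward_propagate_addr_expr folds the address into the use,

           x_2 = MEM[(int *)&a + 4B];

         and once all uses are rewritten the now-dead assignment to ptr_1
         is removed with release_defs / gsi_remove.  POINTER_PLUS_EXPRs with
         a constant offset are handled the same way by first wrapping the
         adjusted address in an ADDR_EXPR of a MEM_REF.  */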

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR
                         || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1.  */
                    if (forward_propagate_into_cond (&gsi)
                        || combine_cond_exprs (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (&gsi);
                else if (code == POINTER_PLUS_EXPR)
                  changed = associate_pointerplus (&gsi);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;

                    /* If we have a narrowing conversion to an integral
                       type that is fed by a BIT_AND_EXPR, we might be
                       able to remove the BIT_AND_EXPR if it merely
                       masks off bits outside the final type (and nothing
                       else).  */
                    if (! did_something)
                      {
                        tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
                        tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
                        if (INTEGRAL_TYPE_P (outer_type)
                            && INTEGRAL_TYPE_P (inner_type)
                            && (TYPE_PRECISION (outer_type)
                                <= TYPE_PRECISION (inner_type)))
                          did_something = simplify_conversion_from_bitmask (&gsi);
                      }

                    changed = did_something != 0;
                  }
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
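
/* A brief illustrative note (not part of the original sources) on the
   combine phase: statements changed by a simplification keep GF_PLF_1
   clear, so the walk backs up to just past the last statement already
   marked as settled and visits the rewritten statement again.  For
   example

     tem_1 = ~x_2;
     y_3 = ~tem_1;

   has its second statement rewritten by simplify_not_neg_expr into
   y_3 = x_2, and that statement is then reconsidered in case further
   simplifications have become possible.  */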

static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}
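
/* Usage note (illustrative, not part of the original sources): the pass is
   registered under the name "forwprop" and created via make_pass_forwprop;
   it is gated on flag_tree_forwprop, i.e. -ftree-forwprop, which is enabled
   by default at -O and above.  Its per-instance dumps can be requested on
   the command line, for example

     gcc -O2 -fdump-tree-forwprop-details test.c

   which should produce dump files such as test.c.*.forwprop1 for each
   scheduled instance of the pass (exact file names depend on the pass
   numbering of the particular compiler build).  */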