4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
aad93da1 2 Copyright (C) 2000-2017 Free Software Foundation, Inc.
4ee9c684 3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
48e1416a 7
4ee9c684 8GCC is free software; you can redistribute it and/or modify it
9under the terms of the GNU General Public License as published by the
8c4c00c1 10Free Software Foundation; either version 3, or (at your option) any
4ee9c684 11later version.
48e1416a 12
4ee9c684 13GCC is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
48e1416a 17
4ee9c684 18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
4ee9c684 21
88dbf20f 22/* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
29
bfa30570 30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
88dbf20f 37
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
42
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
45
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
49
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
51
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
57 propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
60
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
48e1416a 65
88dbf20f 66
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main shortcuts:
72
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
75
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
81
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
84 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
86
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
94
95
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
100
e913b5cd 101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
107 but this causes no loss of information since these variables will
108 never be extended.
109
4ee9c684 110 References:
111
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
114
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
117
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
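/* Editorial illustration (not part of the original source): a
   hypothetical walk-through of the pass on a small fragment.

     x_1 = 4;              x_1 becomes CONSTANT 4.
     if (x_1 > 10)         The predicate folds to false, so only
       y_2 = 5;            the else edge is marked executable.
     else
       y_3 = 6;
     y_4 = PHI (y_2, y_3)  y_2 arrives over a non-executable edge
                           and is ignored; y_4 is CONSTANT 6.
     z_5 = x_1 + y_4;      Folds to CONSTANT 10.

   Final substitution then replaces the uses with 4, 6 and 10.  */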
120
121#include "config.h"
122#include "system.h"
123#include "coretypes.h"
9ef16211 124#include "backend.h"
7c29e30e 125#include "target.h"
4ee9c684 126#include "tree.h"
9ef16211 127#include "gimple.h"
7c29e30e 128#include "tree-pass.h"
9ef16211 129#include "ssa.h"
7c29e30e 130#include "gimple-pretty-print.h"
b20a8bb4 131#include "fold-const.h"
bc61cadb 132#include "gimple-fold.h"
133#include "tree-eh.h"
a8783bee 134#include "gimplify.h"
dcf1a1ec 135#include "gimple-iterator.h"
073c1fd5 136#include "tree-cfg.h"
41511585 137#include "tree-ssa-propagate.h"
43fb76c1 138#include "dbgcnt.h"
9a65cc0a 139#include "params.h"
f7715905 140#include "builtins.h"
058a1b7a 141#include "tree-chkp.h"
e2588447 142#include "cfgloop.h"
9c1a31e4 143#include "stor-layout.h"
144#include "optabs-query.h"
a54071b2 145#include "tree-ssa-ccp.h"
82193434 146#include "tree-dfa.h"
184fac50 147#include "diagnostic-core.h"
2dc10fae 148
4ee9c684 149/* Possible lattice values. */
150typedef enum
151{
bfa30570 152 UNINITIALIZED,
4ee9c684 153 UNDEFINED,
154 CONSTANT,
155 VARYING
88dbf20f 156} ccp_lattice_t;
4ee9c684 157
9908fe4d 158struct ccp_prop_value_t {
14f101cf 159 /* Lattice value. */
160 ccp_lattice_t lattice_val;
161
162 /* Propagated value. */
163 tree value;
b7e55469 164
e913b5cd 165 /* Mask that applies to the propagated value during CCP. For X
166 with a CONSTANT lattice value, X & ~mask == value & ~mask. The
167 zero bits in the mask cover constant values; the one bits mean
168 no information. */
5de9d3ed 169 widest_int mask;
14f101cf 170};
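/* Editorial illustration (not part of the original source): with
   lattice_val == CONSTANT, value == 0x8 and mask == 0x3, bits 0 and 1
   are unknown while all higher bits are known, so the tracked value is
   one of 0x8, 0x9, 0xa or 0xb.  A mask of 0 is a fully known constant;
   a mask of all ones carries no bit information at all.  */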
171
88dbf20f 172/* Array of propagated constant values. After propagation,
173 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
174 the constant is held in an SSA name representing a memory store
4fb5e5ca 175 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
176 memory reference used to store (i.e., the LHS of the assignment
177 doing the store). */
9908fe4d 178static ccp_prop_value_t *const_val;
285df01b 179static unsigned n_const_val;
4ee9c684 180
9908fe4d 181static void canonicalize_value (ccp_prop_value_t *);
6688f8ec 182static bool ccp_fold_stmt (gimple_stmt_iterator *);
27de3d43 183static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
4af351a8 184
88dbf20f 185/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 186
187static void
9908fe4d 188dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
01406fc0 189{
41511585 190 switch (val.lattice_val)
01406fc0 191 {
88dbf20f 192 case UNINITIALIZED:
193 fprintf (outf, "%sUNINITIALIZED", prefix);
194 break;
41511585 195 case UNDEFINED:
196 fprintf (outf, "%sUNDEFINED", prefix);
197 break;
198 case VARYING:
199 fprintf (outf, "%sVARYING", prefix);
200 break;
41511585 201 case CONSTANT:
b7e55469 202 if (TREE_CODE (val.value) != INTEGER_CST
796b6678 203 || val.mask == 0)
16ab4e97 204 {
205 fprintf (outf, "%sCONSTANT ", prefix);
206 print_generic_expr (outf, val.value, dump_flags);
207 }
b7e55469 208 else
209 {
28e557ef 210 widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
211 val.mask);
e913b5cd 212 fprintf (outf, "%sCONSTANT ", prefix);
213 print_hex (cval, outf);
214 fprintf (outf, " (");
215 print_hex (val.mask, outf);
216 fprintf (outf, ")");
b7e55469 217 }
41511585 218 break;
219 default:
8c0963c4 220 gcc_unreachable ();
41511585 221 }
01406fc0 222}
4ee9c684 223
4ee9c684 224
88dbf20f 225/* Print lattice value VAL to stderr. */
226
9908fe4d 227void debug_lattice_value (ccp_prop_value_t val);
88dbf20f 228
4b987fac 229DEBUG_FUNCTION void
9908fe4d 230debug_lattice_value (ccp_prop_value_t val)
88dbf20f 231{
232 dump_lattice_value (stderr, "", val);
233 fprintf (stderr, "\n");
234}
4ee9c684 235
9a93e2f7 236/* Extend NONZERO_BITS to a full mask, based on SGN. */
9c1be15e 237
238static widest_int
9a93e2f7 239extend_mask (const wide_int &nonzero_bits, signop sgn)
9c1be15e 240{
9a93e2f7 241 return widest_int::from (nonzero_bits, sgn);
9c1be15e 242}
4ee9c684 243
88dbf20f 244/* Compute a default value for variable VAR and store it in the
245 CONST_VAL array. The following rules are used to get default
246 values:
01406fc0 247
88dbf20f 248 1- Global and static variables that are declared constant are
249 considered CONSTANT.
250
251 2- Any other value is considered UNDEFINED. This is useful when
41511585 252 considering PHI nodes. PHI arguments that are undefined do not
253 change the constant value of the PHI node, which allows for more
88dbf20f 254 constants to be propagated.
4ee9c684 255
8883e700 256 3- Variables defined by statements other than assignments and PHI
88dbf20f 257 nodes are considered VARYING.
4ee9c684 258
8883e700 259 4- Initial values of variables that are not GIMPLE registers are
bfa30570 260 considered VARYING. */
4ee9c684 261
9908fe4d 262static ccp_prop_value_t
88dbf20f 263get_default_value (tree var)
264{
9908fe4d 265 ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
42acab1c 266 gimple *stmt;
8edeb88b 267
268 stmt = SSA_NAME_DEF_STMT (var);
269
270 if (gimple_nop_p (stmt))
4ee9c684 271 {
8edeb88b 272 /* Variables defined by an empty statement are those used
273 before being initialized. If VAR is a local variable, we
274 can assume initially that it is UNDEFINED, otherwise we must
275 consider it VARYING. */
7c782c9b 276 if (!virtual_operand_p (var)
9ae1b28a 277 && SSA_NAME_VAR (var)
7c782c9b 278 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
8edeb88b 279 val.lattice_val = UNDEFINED;
280 else
b7e55469 281 {
282 val.lattice_val = VARYING;
e913b5cd 283 val.mask = -1;
fc08b993 284 if (flag_tree_bit_ccp)
285 {
9c1be15e 286 wide_int nonzero_bits = get_nonzero_bits (var);
287 if (nonzero_bits != -1)
fc08b993 288 {
289 val.lattice_val = CONSTANT;
290 val.value = build_zero_cst (TREE_TYPE (var));
9a93e2f7 291 val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (var)));
fc08b993 292 }
293 }
b7e55469 294 }
4ee9c684 295 }
b45b214a 296 else if (is_gimple_assign (stmt))
41511585 297 {
8edeb88b 298 tree cst;
299 if (gimple_assign_single_p (stmt)
300 && DECL_P (gimple_assign_rhs1 (stmt))
301 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
88dbf20f 302 {
8edeb88b 303 val.lattice_val = CONSTANT;
304 val.value = cst;
88dbf20f 305 }
306 else
b45b214a 307 {
308 /* Any other variable defined by an assignment is considered
309 UNDEFINED. */
310 val.lattice_val = UNDEFINED;
311 }
312 }
313 else if ((is_gimple_call (stmt)
314 && gimple_call_lhs (stmt) != NULL_TREE)
315 || gimple_code (stmt) == GIMPLE_PHI)
316 {
317 /* A variable defined by a call or a PHI node is considered
318 UNDEFINED. */
319 val.lattice_val = UNDEFINED;
8edeb88b 320 }
321 else
322 {
323 /* Otherwise, VAR will never take on a constant value. */
324 val.lattice_val = VARYING;
e913b5cd 325 val.mask = -1;
41511585 326 }
4ee9c684 327
41511585 328 return val;
329}
4ee9c684 330
4ee9c684 331
bfa30570 332/* Get the constant value associated with variable VAR. */
4ee9c684 333
9908fe4d 334static inline ccp_prop_value_t *
bfa30570 335get_value (tree var)
88dbf20f 336{
9908fe4d 337 ccp_prop_value_t *val;
bfa30570 338
285df01b 339 if (const_val == NULL
340 || SSA_NAME_VERSION (var) >= n_const_val)
e004838d 341 return NULL;
342
343 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 344 if (val->lattice_val == UNINITIALIZED)
4ee9c684 345 *val = get_default_value (var);
346
f5faab84 347 canonicalize_value (val);
4af351a8 348
4ee9c684 349 return val;
350}
351
15d138c9 352/* Return the constant tree value associated with VAR. */
353
354static inline tree
355get_constant_value (tree var)
356{
9908fe4d 357 ccp_prop_value_t *val;
98d92e3c 358 if (TREE_CODE (var) != SSA_NAME)
359 {
360 if (is_gimple_min_invariant (var))
361 return var;
362 return NULL_TREE;
363 }
364 val = get_value (var);
b7e55469 365 if (val
366 && val->lattice_val == CONSTANT
367 && (TREE_CODE (val->value) != INTEGER_CST
796b6678 368 || val->mask == 0))
15d138c9 369 return val->value;
370 return NULL_TREE;
371}
372
bfa30570 373/* Sets the value associated with VAR to VARYING. */
374
375static inline void
376set_value_varying (tree var)
377{
9908fe4d 378 ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 379
380 val->lattice_val = VARYING;
381 val->value = NULL_TREE;
e913b5cd 382 val->mask = -1;
bfa30570 383}
4ee9c684 384
0001b944 385/* For integer constants, make sure to drop TREE_OVERFLOW. */
b31eb493 386
387static void
9908fe4d 388canonicalize_value (ccp_prop_value_t *val)
b31eb493 389{
f5faab84 390 if (val->lattice_val != CONSTANT)
391 return;
392
393 if (TREE_OVERFLOW_P (val->value))
394 val->value = drop_tree_overflow (val->value);
b31eb493 395}
396
b7e55469 397/* Return whether the lattice transition is valid. */
398
399static bool
9908fe4d 400valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
b7e55469 401{
402 /* Lattice transitions must always be monotonically increasing in
403 value. */
404 if (old_val.lattice_val < new_val.lattice_val)
405 return true;
406
407 if (old_val.lattice_val != new_val.lattice_val)
408 return false;
409
410 if (!old_val.value && !new_val.value)
411 return true;
412
413 /* Now both lattice values are CONSTANT. */
414
fc6cc27b 415 /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
416 when only a single copy edge is executable. */
417 if (TREE_CODE (old_val.value) == SSA_NAME
418 && TREE_CODE (new_val.value) == SSA_NAME)
419 return true;
420
421 /* Allow transitioning from a constant to a copy. */
422 if (is_gimple_min_invariant (old_val.value)
423 && TREE_CODE (new_val.value) == SSA_NAME)
424 return true;
425
43c92e0a 426 /* Allow transitioning from PHI <&x, not executable> == &x
427 to PHI <&x, &y> == common alignment. */
b7e55469 428 if (TREE_CODE (old_val.value) != INTEGER_CST
429 && TREE_CODE (new_val.value) == INTEGER_CST)
430 return true;
431
432 /* Bit-lattices have to agree in the still valid bits. */
433 if (TREE_CODE (old_val.value) == INTEGER_CST
434 && TREE_CODE (new_val.value) == INTEGER_CST)
5de9d3ed 435 return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
436 == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
b7e55469 437
438 /* Otherwise constant values have to agree. */
0001b944 439 if (operand_equal_p (old_val.value, new_val.value, 0))
440 return true;
441
442 /* At least the kinds and types should agree now. */
443 if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
444 || !types_compatible_p (TREE_TYPE (old_val.value),
445 TREE_TYPE (new_val.value)))
446 return false;
447
448 /* For floats and !HONOR_NANS allow transitions from (partial) NaN
449 to non-NaN. */
450 tree type = TREE_TYPE (new_val.value);
451 if (SCALAR_FLOAT_TYPE_P (type)
93633022 452 && !HONOR_NANS (type))
0001b944 453 {
454 if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
455 return true;
456 }
457 else if (VECTOR_FLOAT_TYPE_P (type)
93633022 458 && !HONOR_NANS (type))
0001b944 459 {
460 for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
461 if (!REAL_VALUE_ISNAN
462 (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
463 && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
464 VECTOR_CST_ELT (new_val.value, i), 0))
465 return false;
466 return true;
467 }
468 else if (COMPLEX_FLOAT_TYPE_P (type)
93633022 469 && !HONOR_NANS (type))
0001b944 470 {
471 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
472 && !operand_equal_p (TREE_REALPART (old_val.value),
473 TREE_REALPART (new_val.value), 0))
474 return false;
475 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
476 && !operand_equal_p (TREE_IMAGPART (old_val.value),
477 TREE_IMAGPART (new_val.value), 0))
478 return false;
479 return true;
480 }
481 return false;
b7e55469 482}
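/* Editorial examples for the rules above (not part of the original
   source): UNDEFINED -> CONSTANT 3 and CONSTANT 3 -> VARYING are
   monotonically increasing and therefore valid; CONSTANT 3 -> CONSTANT 4
   is not, unless both are INTEGER_CSTs whose still-known bits (those
   outside NEW_VAL.mask) agree, e.g. value 3/mask 0 -> value 7/mask 4.  */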
483
88dbf20f 484/* Set the value for variable VAR to NEW_VAL. Return true if the new
485 value is different from VAR's previous value. */
4ee9c684 486
41511585 487static bool
27de3d43 488set_lattice_value (tree var, ccp_prop_value_t *new_val)
4ee9c684 489{
6d0bf6d6 490 /* We can deal with old UNINITIALIZED values just fine here. */
9908fe4d 491 ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
88dbf20f 492
27de3d43 493 canonicalize_value (new_val);
b31eb493 494
b7e55469 495 /* We have to be careful to not go up the bitwise lattice
27de3d43 496 represented by the mask. Instead of dropping to VARYING
497 use the meet operator to retain a conservative value.
498 Missed optimizations like PR65851 make this necessary.
499 It also ensures we converge to a stable lattice solution. */
500 if (new_val->lattice_val == CONSTANT
b7e55469 501 && old_val->lattice_val == CONSTANT
27de3d43 502 && TREE_CODE (new_val->value) != SSA_NAME)
503 ccp_lattice_meet (new_val, old_val);
bfa30570 504
27de3d43 505 gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));
88dbf20f 506
b7e55469 507 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
508 caller that this was a non-transition. */
27de3d43 509 if (old_val->lattice_val != new_val->lattice_val
510 || (new_val->lattice_val == CONSTANT
511 && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
512 || (TREE_CODE (new_val->value) == INTEGER_CST
513 && (new_val->mask != old_val->mask
bc1d3d97 514 || (wi::bit_and_not (wi::to_widest (old_val->value),
27de3d43 515 new_val->mask)
516 != wi::bit_and_not (wi::to_widest (new_val->value),
517 new_val->mask))))
518 || (TREE_CODE (new_val->value) != INTEGER_CST
519 && !operand_equal_p (new_val->value, old_val->value, 0)))))
4ee9c684 520 {
b7e55469 521 /* ??? We would like to delay creation of INTEGER_CSTs from
522 partially constants here. */
523
41511585 524 if (dump_file && (dump_flags & TDF_DETAILS))
525 {
27de3d43 526 dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
bfa30570 527 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
41511585 528 }
529
27de3d43 530 *old_val = *new_val;
88dbf20f 531
27de3d43 532 gcc_assert (new_val->lattice_val != UNINITIALIZED);
bfa30570 533 return true;
4ee9c684 534 }
41511585 535
536 return false;
4ee9c684 537}
538
9908fe4d 539static ccp_prop_value_t get_value_for_expr (tree, bool);
540static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
a54071b2 541void bit_value_binop (enum tree_code, signop, int, widest_int *, widest_int *,
542 signop, int, const widest_int &, const widest_int &,
543 signop, int, const widest_int &, const widest_int &);
b7e55469 544
5de9d3ed 545/* Return a widest_int that can be used for bitwise simplifications
b7e55469 546 from VAL. */
547
5de9d3ed 548static widest_int
9908fe4d 549value_to_wide_int (ccp_prop_value_t val)
b7e55469 550{
551 if (val.value
552 && TREE_CODE (val.value) == INTEGER_CST)
5de9d3ed 553 return wi::to_widest (val.value);
e913b5cd 554
555 return 0;
b7e55469 556}
557
558/* Return the value for the address expression EXPR based on alignment
559 information. */
6d0bf6d6 560
9908fe4d 561static ccp_prop_value_t
b7e55469 562get_value_from_alignment (tree expr)
563{
f8abb542 564 tree type = TREE_TYPE (expr);
9908fe4d 565 ccp_prop_value_t val;
f8abb542 566 unsigned HOST_WIDE_INT bitpos;
567 unsigned int align;
b7e55469 568
569 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
570
59da1bcd 571 get_pointer_alignment_1 (expr, &align, &bitpos);
cf8f0e63 572 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
5de9d3ed 573 ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
e913b5cd 574 : -1).and_not (align / BITS_PER_UNIT - 1);
c90d2c17 575 val.lattice_val
576 = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
f8abb542 577 if (val.lattice_val == CONSTANT)
796b6678 578 val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
b7e55469 579 else
f8abb542 580 val.value = NULL_TREE;
b7e55469 581
582 return val;
583}
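/* Editorial example (not part of the original source): for an ADDR_EXPR
   known to be 16-byte aligned with a misalignment of 4 bytes,
   get_pointer_alignment_1 returns align == 128 and bitpos == 32, so the
   result is value == 4 with mask == -16: the low four bits are known to
   be 0100 while everything above them is unknown.  */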
584
585/* Return the value for the tree operand EXPR. If FOR_BITS_P is true
586 return constant bits extracted from alignment information for
587 invariant addresses. */
588
9908fe4d 589static ccp_prop_value_t
b7e55469 590get_value_for_expr (tree expr, bool for_bits_p)
6d0bf6d6 591{
9908fe4d 592 ccp_prop_value_t val;
6d0bf6d6 593
594 if (TREE_CODE (expr) == SSA_NAME)
b7e55469 595 {
1941149a 596 ccp_prop_value_t *val_ = get_value (expr);
597 if (val_)
598 val = *val_;
599 else
600 {
601 val.lattice_val = VARYING;
602 val.value = NULL_TREE;
603 val.mask = -1;
604 }
b7e55469 605 if (for_bits_p
606 && val.lattice_val == CONSTANT
607 && TREE_CODE (val.value) == ADDR_EXPR)
608 val = get_value_from_alignment (val.value);
bc1d3d97 609 /* Fall back to a copy value. */
610 if (!for_bits_p
611 && val.lattice_val == VARYING
612 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
613 {
614 val.lattice_val = CONSTANT;
615 val.value = expr;
616 val.mask = -1;
617 }
b7e55469 618 }
619 else if (is_gimple_min_invariant (expr)
620 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
6d0bf6d6 621 {
622 val.lattice_val = CONSTANT;
623 val.value = expr;
e913b5cd 624 val.mask = 0;
f5faab84 625 canonicalize_value (&val);
6d0bf6d6 626 }
b7e55469 627 else if (TREE_CODE (expr) == ADDR_EXPR)
628 val = get_value_from_alignment (expr);
6d0bf6d6 629 else
630 {
631 val.lattice_val = VARYING;
09b2913a 632 val.mask = -1;
6d0bf6d6 633 val.value = NULL_TREE;
634 }
911a6ef1 635
636 if (val.lattice_val == VARYING
637 && TYPE_UNSIGNED (TREE_TYPE (expr)))
638 val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));
639
6d0bf6d6 640 return val;
641}
642
88dbf20f 643/* Return the likely CCP lattice value for STMT.
4ee9c684 644
41511585 645 If STMT has no operands, then return CONSTANT.
4ee9c684 646
d61b9af3 647 Else if undefinedness of operands of STMT causes its value to be
648 undefined, then return UNDEFINED.
4ee9c684 649
41511585 650 Else if any operands of STMT are constants, then return CONSTANT.
4ee9c684 651
41511585 652 Else return VARYING. */
4ee9c684 653
88dbf20f 654static ccp_lattice_t
42acab1c 655likely_value (gimple *stmt)
41511585 656{
d61b9af3 657 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
a8a0d56e 658 bool has_nsa_operand;
41511585 659 tree use;
660 ssa_op_iter iter;
8edeb88b 661 unsigned i;
4ee9c684 662
590c3166 663 enum gimple_code code = gimple_code (stmt);
75a70cf9 664
665 /* This function appears to be called only for assignments, calls,
666 conditionals, and switches, due to the logic in visit_stmt. */
667 gcc_assert (code == GIMPLE_ASSIGN
668 || code == GIMPLE_CALL
669 || code == GIMPLE_COND
670 || code == GIMPLE_SWITCH);
88dbf20f 671
672 /* If the statement has volatile operands, it won't fold to a
673 constant value. */
75a70cf9 674 if (gimple_has_volatile_ops (stmt))
88dbf20f 675 return VARYING;
676
75a70cf9 677 /* Arrive here for more complex cases. */
bfa30570 678 has_constant_operand = false;
d61b9af3 679 has_undefined_operand = false;
680 all_undefined_operands = true;
a8a0d56e 681 has_nsa_operand = false;
8edeb88b 682 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
41511585 683 {
9908fe4d 684 ccp_prop_value_t *val = get_value (use);
41511585 685
1941149a 686 if (val && val->lattice_val == UNDEFINED)
d61b9af3 687 has_undefined_operand = true;
688 else
689 all_undefined_operands = false;
88dbf20f 690
1941149a 691 if (val && val->lattice_val == CONSTANT)
bfa30570 692 has_constant_operand = true;
a8a0d56e 693
694 if (SSA_NAME_IS_DEFAULT_DEF (use)
695 || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
696 has_nsa_operand = true;
4ee9c684 697 }
41511585 698
dd277d48 699 /* There may be constants in regular rhs operands. For calls we
700 have to ignore the lhs, fndecl and static chain; otherwise only
701 the lhs. */
702 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
8edeb88b 703 i < gimple_num_ops (stmt); ++i)
704 {
705 tree op = gimple_op (stmt, i);
706 if (!op || TREE_CODE (op) == SSA_NAME)
707 continue;
708 if (is_gimple_min_invariant (op))
709 has_constant_operand = true;
710 }
711
87c0a9fc 712 if (has_constant_operand)
713 all_undefined_operands = false;
714
3d483a94 715 if (has_undefined_operand
716 && code == GIMPLE_CALL
717 && gimple_call_internal_p (stmt))
718 switch (gimple_call_internal_fn (stmt))
719 {
720 /* These 3 builtins use the first argument just as a magic
721 way to find out a decl uid. */
722 case IFN_GOMP_SIMD_LANE:
723 case IFN_GOMP_SIMD_VF:
724 case IFN_GOMP_SIMD_LAST_LANE:
725 has_undefined_operand = false;
726 break;
727 default:
728 break;
729 }
730
d61b9af3 731 /* If the operation combines operands like COMPLEX_EXPR, make sure to
732 not mark the result UNDEFINED if only one part of the result is
733 undefined. */
75a70cf9 734 if (has_undefined_operand && all_undefined_operands)
d61b9af3 735 return UNDEFINED;
75a70cf9 736 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
d61b9af3 737 {
75a70cf9 738 switch (gimple_assign_rhs_code (stmt))
d61b9af3 739 {
740 /* Unary operators are handled with all_undefined_operands. */
741 case PLUS_EXPR:
742 case MINUS_EXPR:
d61b9af3 743 case POINTER_PLUS_EXPR:
c00c8b9a 744 case BIT_XOR_EXPR:
d61b9af3 745 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
746 Not bitwise operators, one VARYING operand may specify the
c00c8b9a 747 result completely.
748 Not logical operators for the same reason, apart from XOR.
05a936a0 749 Not COMPLEX_EXPR as one VARYING operand makes the result partly
750 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
751 the undefined operand may be promoted. */
d61b9af3 752 return UNDEFINED;
753
43c92e0a 754 case ADDR_EXPR:
755 /* If any part of an address is UNDEFINED, like the index
756 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
757 return UNDEFINED;
758
d61b9af3 759 default:
760 ;
761 }
762 }
763 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
c91fedc5 764 fall back to CONSTANT. During iteration UNDEFINED may still drop
765 to CONSTANT. */
d61b9af3 766 if (has_undefined_operand)
c91fedc5 767 return CONSTANT;
d61b9af3 768
8edeb88b 769 /* We do not consider virtual operands here -- a load from read-only
770 memory may have only VARYING virtual operands, but still be
a8a0d56e 771 constant. Also we can combine the stmt with definitions from
772 operands whose definitions are not simulated again. */
bfa30570 773 if (has_constant_operand
a8a0d56e 774 || has_nsa_operand
8edeb88b 775 || gimple_references_memory_p (stmt))
88dbf20f 776 return CONSTANT;
777
bfa30570 778 return VARYING;
4ee9c684 779}
780
bfa30570 781/* Returns true if STMT cannot be constant. */
782
783static bool
42acab1c 784surely_varying_stmt_p (gimple *stmt)
bfa30570 785{
786 /* If the statement has operands that we cannot handle, it cannot be
787 constant. */
75a70cf9 788 if (gimple_has_volatile_ops (stmt))
bfa30570 789 return true;
790
f257af64 791 /* If it is a call and does not return a value or is not a
237e78b1 792 builtin and not an indirect call or a call to a function with
793 assume_aligned/alloc_align attribute, it is varying. */
75a70cf9 794 if (is_gimple_call (stmt))
f257af64 795 {
237e78b1 796 tree fndecl, fntype = gimple_call_fntype (stmt);
f257af64 797 if (!gimple_call_lhs (stmt)
798 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
237e78b1 799 && !DECL_BUILT_IN (fndecl)
800 && !lookup_attribute ("assume_aligned",
801 TYPE_ATTRIBUTES (fntype))
802 && !lookup_attribute ("alloc_align",
803 TYPE_ATTRIBUTES (fntype))))
f257af64 804 return true;
805 }
bfa30570 806
8edeb88b 807 /* Any other store operation is not interesting. */
dd277d48 808 else if (gimple_vdef (stmt))
8edeb88b 809 return true;
810
bfa30570 811 /* Anything other than assignments and conditional jumps is not
812 interesting for CCP. */
75a70cf9 813 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 814 && gimple_code (stmt) != GIMPLE_COND
815 && gimple_code (stmt) != GIMPLE_SWITCH
816 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 817 return true;
818
819 return false;
820}
4ee9c684 821
41511585 822/* Initialize local data structures for CCP. */
4ee9c684 823
824static void
41511585 825ccp_initialize (void)
4ee9c684 826{
41511585 827 basic_block bb;
4ee9c684 828
285df01b 829 n_const_val = num_ssa_names;
9908fe4d 830 const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
4ee9c684 831
41511585 832 /* Initialize simulation flags for PHI nodes and statements. */
fc00614f 833 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 834 {
75a70cf9 835 gimple_stmt_iterator i;
4ee9c684 836
75a70cf9 837 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
41511585 838 {
42acab1c 839 gimple *stmt = gsi_stmt (i);
2193544e 840 bool is_varying;
841
842 /* If the statement is a control insn, we must simulate
843 it at least once; failing to do so means that its
844 outgoing edges will never get added. */
845 if (stmt_ends_bb_p (stmt))
846 is_varying = false;
847 else
848 is_varying = surely_varying_stmt_p (stmt);
4ee9c684 849
bfa30570 850 if (is_varying)
41511585 851 {
88dbf20f 852 tree def;
853 ssa_op_iter iter;
854
855 /* If the statement will not produce a constant, mark
856 all its outputs VARYING. */
857 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
8edeb88b 858 set_value_varying (def);
41511585 859 }
75a70cf9 860 prop_set_simulate_again (stmt, !is_varying);
41511585 861 }
4ee9c684 862 }
863
75a70cf9 864 /* Now process PHI nodes. We never clear the simulate_again flag on
865 phi nodes, since we do not know which edges are executable yet,
866 except for phi nodes for virtual operands when we do not do store ccp. */
fc00614f 867 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 868 {
1a91d914 869 gphi_iterator i;
41511585 870
75a70cf9 871 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
872 {
1a91d914 873 gphi *phi = i.phi ();
75a70cf9 874
7c782c9b 875 if (virtual_operand_p (gimple_phi_result (phi)))
75a70cf9 876 prop_set_simulate_again (phi, false);
bfa30570 877 else
75a70cf9 878 prop_set_simulate_again (phi, true);
41511585 879 }
4ee9c684 880 }
41511585 881}
4ee9c684 882
43fb76c1 883/* Debug count support. Reset the values of ssa names
884 to VARYING when the total number of ssa names analyzed
885 is beyond the specified debug count. */
886
887static void
888do_dbg_cnt (void)
889{
890 unsigned i;
891 for (i = 0; i < num_ssa_names; i++)
892 {
893 if (!dbg_cnt (ccp))
894 {
895 const_val[i].lattice_val = VARYING;
e913b5cd 896 const_val[i].mask = -1;
43fb76c1 897 const_val[i].value = NULL_TREE;
898 }
899 }
900}
901
4ee9c684 902
88dbf20f 903/* Do final substitution of propagated values, clean up the flowgraph and
d0322b7d 904 free allocated storage. If NONZERO_P, record nonzero bits.
4ee9c684 905
33a34f1e 906 Return TRUE when something was optimized. */
907
908static bool
a54071b2 909ccp_finalize (bool nonzero_p)
4ee9c684 910{
43fb76c1 911 bool something_changed;
153c3b50 912 unsigned i;
f211616e 913 tree name;
43fb76c1 914
915 do_dbg_cnt ();
153c3b50 916
917 /* Derive alignment and misalignment information from partially
fc08b993 918 constant pointers in the lattice or nonzero bits from partially
919 constant integers. */
f211616e 920 FOR_EACH_SSA_NAME (i, name, cfun)
153c3b50 921 {
9908fe4d 922 ccp_prop_value_t *val;
153c3b50 923 unsigned int tem, align;
924
f211616e 925 if (!POINTER_TYPE_P (TREE_TYPE (name))
926 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
927 /* Don't record nonzero bits before IPA to avoid
928 using too much memory. */
929 || !nonzero_p))
153c3b50 930 continue;
931
932 val = get_value (name);
933 if (val->lattice_val != CONSTANT
a54071b2 934 || TREE_CODE (val->value) != INTEGER_CST
935 || val->mask == 0)
153c3b50 936 continue;
937
fc08b993 938 if (POINTER_TYPE_P (TREE_TYPE (name)))
939 {
940 /* Trailing mask bits specify the alignment, trailing value
941 bits the misalignment. */
aeb682a2 942 tem = val->mask.to_uhwi ();
ac29ece2 943 align = least_bit_hwi (tem);
fc08b993 944 if (align > 1)
945 set_ptr_info_alignment (get_ptr_info (name), align,
f9ae6f95 946 (TREE_INT_CST_LOW (val->value)
fc08b993 947 & (align - 1)));
948 }
949 else
950 {
9c1be15e 951 unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
952 wide_int nonzero_bits = wide_int::from (val->mask, precision,
953 UNSIGNED) | val->value;
fc08b993 954 nonzero_bits &= get_nonzero_bits (name);
955 set_nonzero_bits (name, nonzero_bits);
956 }
153c3b50 957 }
958
88dbf20f 959 /* Perform substitutions based on the known constant values. */
723c387e 960 something_changed = substitute_and_fold (get_constant_value, ccp_fold_stmt);
4ee9c684 961
88dbf20f 962 free (const_val);
e004838d 963 const_val = NULL;
33a34f1e 964 return something_changed;
4ee9c684 965}
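/* Editorial example (not part of the original source) of the alignment
   derivation above: a pointer whose lattice value has mask ...111000
   and value ...000010 has its three trailing bits known, so
   least_bit_hwi (mask) yields align == 8 and the low value bits give a
   misalignment of 2: the pointer is known to have the form 8*n + 2.  */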
966
967
88dbf20f 968/* Compute the meet operator between *VAL1 and *VAL2. Store the result
969 in VAL1.
970
971 any M UNDEFINED = any
88dbf20f 972 any M VARYING = VARYING
973 Ci M Cj = Ci if (i == j)
974 Ci M Cj = VARYING if (i != j)
bfa30570 975 */
4ee9c684 976
977static void
27de3d43 978ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
4ee9c684 979{
fc6cc27b 980 if (val1->lattice_val == UNDEFINED
981 /* For UNDEFINED M SSA we can't always use the SSA name because
982 its definition may not dominate the PHI node. Doing optimistic
983 copy propagation also causes a lot of gcc.dg/uninit-pred*.c FAILs. */
984 && (val2->lattice_val != CONSTANT
985 || TREE_CODE (val2->value) != SSA_NAME))
4ee9c684 986 {
88dbf20f 987 /* UNDEFINED M any = any */
988 *val1 = *val2;
41511585 989 }
fc6cc27b 990 else if (val2->lattice_val == UNDEFINED
991 /* See above. */
992 && (val1->lattice_val != CONSTANT
993 || TREE_CODE (val1->value) != SSA_NAME))
92481a4d 994 {
88dbf20f 995 /* any M UNDEFINED = any
996 Nothing to do. VAL1 already contains the value we want. */
997 ;
92481a4d 998 }
88dbf20f 999 else if (val1->lattice_val == VARYING
1000 || val2->lattice_val == VARYING)
41511585 1001 {
88dbf20f 1002 /* any M VARYING = VARYING. */
1003 val1->lattice_val = VARYING;
e913b5cd 1004 val1->mask = -1;
88dbf20f 1005 val1->value = NULL_TREE;
41511585 1006 }
b7e55469 1007 else if (val1->lattice_val == CONSTANT
1008 && val2->lattice_val == CONSTANT
1009 && TREE_CODE (val1->value) == INTEGER_CST
1010 && TREE_CODE (val2->value) == INTEGER_CST)
1011 {
1012 /* Ci M Cj = Ci if (i == j)
1013 Ci M Cj = VARYING if (i != j)
1014
1015 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
1016 drop to varying. */
e913b5cd 1017 val1->mask = (val1->mask | val2->mask
5de9d3ed 1018 | (wi::to_widest (val1->value)
1019 ^ wi::to_widest (val2->value)));
c90d2c17 1020 if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
b7e55469 1021 {
1022 val1->lattice_val = VARYING;
1023 val1->value = NULL_TREE;
1024 }
1025 }
88dbf20f 1026 else if (val1->lattice_val == CONSTANT
1027 && val2->lattice_val == CONSTANT
27de3d43 1028 && operand_equal_p (val1->value, val2->value, 0))
41511585 1029 {
88dbf20f 1030 /* Ci M Cj = Ci if (i == j)
1031 Ci M Cj = VARYING if (i != j)
1032
b7e55469 1033 VAL1 already contains the value we want for equivalent values. */
1034 }
1035 else if (val1->lattice_val == CONSTANT
1036 && val2->lattice_val == CONSTANT
1037 && (TREE_CODE (val1->value) == ADDR_EXPR
1038 || TREE_CODE (val2->value) == ADDR_EXPR))
1039 {
1040 /* When unequal addresses are involved, try meeting for
1041 alignment. */
9908fe4d 1042 ccp_prop_value_t tem = *val2;
b7e55469 1043 if (TREE_CODE (val1->value) == ADDR_EXPR)
1044 *val1 = get_value_for_expr (val1->value, true);
1045 if (TREE_CODE (val2->value) == ADDR_EXPR)
1046 tem = get_value_for_expr (val2->value, true);
27de3d43 1047 ccp_lattice_meet (val1, &tem);
41511585 1048 }
1049 else
1050 {
88dbf20f 1051 /* Any other combination is VARYING. */
1052 val1->lattice_val = VARYING;
e913b5cd 1053 val1->mask = -1;
88dbf20f 1054 val1->value = NULL_TREE;
41511585 1055 }
4ee9c684 1056}
1057
1058
41511585 1059/* Loop through the PHI_NODE's parameters for BLOCK and compare their
1060 lattice values to determine PHI_NODE's lattice value. The value of a
88dbf20f 1061 PHI node is determined by calling ccp_lattice_meet with all the arguments
41511585 1062 of the PHI node that are incoming via executable edges. */
4ee9c684 1063
41511585 1064static enum ssa_prop_result
1a91d914 1065ccp_visit_phi_node (gphi *phi)
4ee9c684 1066{
75a70cf9 1067 unsigned i;
bc1d3d97 1068 ccp_prop_value_t new_val;
4ee9c684 1069
41511585 1070 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 1071 {
41511585 1072 fprintf (dump_file, "\nVisiting PHI node: ");
75a70cf9 1073 print_gimple_stmt (dump_file, phi, 0, dump_flags);
4ee9c684 1074 }
4ee9c684 1075
bc1d3d97 1076 new_val.lattice_val = UNDEFINED;
1077 new_val.value = NULL_TREE;
1078 new_val.mask = 0;
4ee9c684 1079
bc1d3d97 1080 bool first = true;
83c60000 1081 bool non_exec_edge = false;
75a70cf9 1082 for (i = 0; i < gimple_phi_num_args (phi); i++)
41511585 1083 {
88dbf20f 1084 /* Compute the meet operator over all the PHI arguments flowing
1085 through executable edges. */
75a70cf9 1086 edge e = gimple_phi_arg_edge (phi, i);
4ee9c684 1087
41511585 1088 if (dump_file && (dump_flags & TDF_DETAILS))
1089 {
1090 fprintf (dump_file,
1091 "\n Argument #%d (%d -> %d %sexecutable)\n",
1092 i, e->src->index, e->dest->index,
1093 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1094 }
1095
1096 /* If the incoming edge is executable, compute the meet operator for
1097 the existing value of the PHI node and the current PHI argument. */
1098 if (e->flags & EDGE_EXECUTABLE)
1099 {
75a70cf9 1100 tree arg = gimple_phi_arg (phi, i)->def;
9908fe4d 1101 ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
4ee9c684 1102
bc1d3d97 1103 if (first)
1104 {
1105 new_val = arg_val;
1106 first = false;
1107 }
1108 else
27de3d43 1109 ccp_lattice_meet (&new_val, &arg_val);
4ee9c684 1110
41511585 1111 if (dump_file && (dump_flags & TDF_DETAILS))
1112 {
1113 fprintf (dump_file, "\t");
88dbf20f 1114 print_generic_expr (dump_file, arg, dump_flags);
1115 dump_lattice_value (dump_file, "\tValue: ", arg_val);
41511585 1116 fprintf (dump_file, "\n");
1117 }
4ee9c684 1118
41511585 1119 if (new_val.lattice_val == VARYING)
1120 break;
1121 }
83c60000 1122 else
1123 non_exec_edge = true;
1124 }
1125
1126 /* In case there were non-executable edges and the value is a copy,
1127 make sure its definition dominates the PHI node. */
1128 if (non_exec_edge
1129 && new_val.lattice_val == CONSTANT
1130 && TREE_CODE (new_val.value) == SSA_NAME
1131 && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
1132 && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
1133 gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
1134 {
1135 new_val.lattice_val = VARYING;
1136 new_val.value = NULL_TREE;
1137 new_val.mask = -1;
41511585 1138 }
4ee9c684 1139
1140 if (dump_file && (dump_flags & TDF_DETAILS))
41511585 1141 {
1142 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1143 fprintf (dump_file, "\n\n");
1144 }
1145
bfa30570 1146 /* Make the transition to the new value. */
27de3d43 1147 if (set_lattice_value (gimple_phi_result (phi), &new_val))
41511585 1148 {
1149 if (new_val.lattice_val == VARYING)
1150 return SSA_PROP_VARYING;
1151 else
1152 return SSA_PROP_INTERESTING;
1153 }
1154 else
1155 return SSA_PROP_NOT_INTERESTING;
4ee9c684 1156}
1157
15d138c9 1158/* Return the constant value for OP or OP otherwise. */
00f4f705 1159
1160static tree
15d138c9 1161valueize_op (tree op)
00f4f705 1162{
00f4f705 1163 if (TREE_CODE (op) == SSA_NAME)
1164 {
15d138c9 1165 tree tem = get_constant_value (op);
1166 if (tem)
1167 return tem;
00f4f705 1168 }
1169 return op;
1170}
1171
4e1b3545 1172/* Return the constant value for OP, but signal to not follow SSA
1173 edges if the definition may be simulated again. */
1174
1175static tree
1176valueize_op_1 (tree op)
1177{
1178 if (TREE_CODE (op) == SSA_NAME)
1179 {
4e1b3545 1180 /* If the definition may be simulated again we cannot follow
1181 this SSA edge as the SSA propagator does not necessarily
1182 re-visit the use. */
42acab1c 1183 gimple *def_stmt = SSA_NAME_DEF_STMT (op);
2160d5ba 1184 if (!gimple_nop_p (def_stmt)
1185 && prop_simulate_again_p (def_stmt))
4e1b3545 1186 return NULL_TREE;
bc8fa068 1187 tree tem = get_constant_value (op);
1188 if (tem)
1189 return tem;
4e1b3545 1190 }
1191 return op;
1192}
1193
41511585 1194/* CCP specific front-end to the non-destructive constant folding
1195 routines.
4ee9c684 1196
1197 Attempt to simplify the RHS of STMT knowing that one or more
1198 operands are constants.
1199
1200 If simplification is possible, return the simplified RHS,
75a70cf9 1201 otherwise return the original RHS or NULL_TREE. */
4ee9c684 1202
1203static tree
42acab1c 1204ccp_fold (gimple *stmt)
4ee9c684 1205{
389dd41b 1206 location_t loc = gimple_location (stmt);
75a70cf9 1207 switch (gimple_code (stmt))
88dbf20f 1208 {
75a70cf9 1209 case GIMPLE_COND:
1210 {
1211 /* Handle comparison operators that can appear in GIMPLE form. */
15d138c9 1212 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1213 tree op1 = valueize_op (gimple_cond_rhs (stmt));
75a70cf9 1214 enum tree_code code = gimple_cond_code (stmt);
389dd41b 1215 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
75a70cf9 1216 }
4ee9c684 1217
75a70cf9 1218 case GIMPLE_SWITCH:
1219 {
15d138c9 1220 /* Return the constant switch index. */
1a91d914 1221 return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
75a70cf9 1222 }
912f109f 1223
1d0b727d 1224 case GIMPLE_ASSIGN:
1225 case GIMPLE_CALL:
4e1b3545 1226 return gimple_fold_stmt_to_constant_1 (stmt,
1227 valueize_op, valueize_op_1);
04236c3a 1228
8782adcf 1229 default:
1d0b727d 1230 gcc_unreachable ();
8782adcf 1231 }
8782adcf 1232}
75a70cf9 1233
b7e55469 1234/* Apply the operation CODE in type TYPE to the value, mask pair
1235 RVAL and RMASK representing a value of type RTYPE and set
1236 the value, mask pair *VAL and *MASK to the result. */
1237
a54071b2 1238void
1239bit_value_unop (enum tree_code code, signop type_sgn, int type_precision,
1240 widest_int *val, widest_int *mask,
1241 signop rtype_sgn, int rtype_precision,
1242 const widest_int &rval, const widest_int &rmask)
b7e55469 1243{
1244 switch (code)
1245 {
1246 case BIT_NOT_EXPR:
1247 *mask = rmask;
cf8f0e63 1248 *val = ~rval;
b7e55469 1249 break;
1250
1251 case NEGATE_EXPR:
1252 {
5de9d3ed 1253 widest_int temv, temm;
b7e55469 1254 /* Return ~rval + 1. */
a54071b2 1255 bit_value_unop (BIT_NOT_EXPR, type_sgn, type_precision, &temv, &temm,
1256 type_sgn, type_precision, rval, rmask);
1257 bit_value_binop (PLUS_EXPR, type_sgn, type_precision, val, mask,
1258 type_sgn, type_precision, temv, temm,
1259 type_sgn, type_precision, 1, 0);
b7e55469 1260 break;
1261 }
1262
1263 CASE_CONVERT:
1264 {
b7e55469 1265 /* First extend mask and value according to the original type. */
a54071b2 1266 *mask = wi::ext (rmask, rtype_precision, rtype_sgn);
1267 *val = wi::ext (rval, rtype_precision, rtype_sgn);
b7e55469 1268
1269 /* Then extend mask and value according to the target type. */
a54071b2 1270 *mask = wi::ext (*mask, type_precision, type_sgn);
1271 *val = wi::ext (*val, type_precision, type_sgn);
b7e55469 1272 break;
1273 }
1274
1275 default:
e913b5cd 1276 *mask = -1;
b7e55469 1277 break;
1278 }
1279}
1280
1281/* Apply the operation CODE in type TYPE to the value, mask pairs
1282 R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1283 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1284
a54071b2 1285void
1286bit_value_binop (enum tree_code code, signop sgn, int width,
1287 widest_int *val, widest_int *mask,
1288 signop r1type_sgn, int r1type_precision,
1289 const widest_int &r1val, const widest_int &r1mask,
1290 signop r2type_sgn, int r2type_precision,
1291 const widest_int &r2val, const widest_int &r2mask)
b7e55469 1292{
10c3fe8d 1293 bool swap_p = false;
e913b5cd 1294
1295 /* Assume we'll get a constant result. Use an initial non-varying
1296 value; we fall back to varying in the end if necessary. */
1297 *mask = -1;
1298
b7e55469 1299 switch (code)
1300 {
1301 case BIT_AND_EXPR:
1302 /* The mask is constant where there is a known not
1303 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
cf8f0e63 1304 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1305 *val = r1val & r2val;
b7e55469 1306 break;
1307
1308 case BIT_IOR_EXPR:
1309 /* The mask is constant where there is a known
1310 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
cf8f0e63 1311 *mask = (r1mask | r2mask)
1312 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1313 *val = r1val | r2val;
b7e55469 1314 break;
1315
1316 case BIT_XOR_EXPR:
1317 /* m1 | m2 */
cf8f0e63 1318 *mask = r1mask | r2mask;
1319 *val = r1val ^ r2val;
b7e55469 1320 break;
1321
1322 case LROTATE_EXPR:
1323 case RROTATE_EXPR:
796b6678 1324 if (r2mask == 0)
b7e55469 1325 {
28e557ef 1326 widest_int shift = r2val;
796b6678 1327 if (shift == 0)
e913b5cd 1328 {
1329 *mask = r1mask;
1330 *val = r1val;
1331 }
ddb1be65 1332 else
e913b5cd 1333 {
796b6678 1334 if (wi::neg_p (shift))
e913b5cd 1335 {
1336 shift = -shift;
1337 if (code == RROTATE_EXPR)
1338 code = LROTATE_EXPR;
1339 else
1340 code = RROTATE_EXPR;
1341 }
1342 if (code == RROTATE_EXPR)
1343 {
796b6678 1344 *mask = wi::rrotate (r1mask, shift, width);
1345 *val = wi::rrotate (r1val, shift, width);
e913b5cd 1346 }
1347 else
1348 {
796b6678 1349 *mask = wi::lrotate (r1mask, shift, width);
1350 *val = wi::lrotate (r1val, shift, width);
e913b5cd 1351 }
1352 }
b7e55469 1353 }
1354 break;
1355
1356 case LSHIFT_EXPR:
1357 case RSHIFT_EXPR:
1358 /* ??? We can handle partially known shift counts if we know
1359 their sign. That way we can tell that (x << (y | 8)) & 255
1360 is zero. */
796b6678 1361 if (r2mask == 0)
b7e55469 1362 {
28e557ef 1363 widest_int shift = r2val;
796b6678 1364 if (shift == 0)
b7e55469 1365 {
1366 *mask = r1mask;
1367 *val = r1val;
1368 }
ddb1be65 1369 else
e913b5cd 1370 {
796b6678 1371 if (wi::neg_p (shift))
e913b5cd 1372 {
1373 shift = -shift;
1374 if (code == RSHIFT_EXPR)
1375 code = LSHIFT_EXPR;
1376 else
1377 code = RSHIFT_EXPR;
1378 }
1379 if (code == RSHIFT_EXPR)
1380 {
45639915 1381 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1382 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
e913b5cd 1383 }
1384 else
1385 {
9fdc1ed4 1386 *mask = wi::ext (r1mask << shift, width, sgn);
1387 *val = wi::ext (r1val << shift, width, sgn);
e913b5cd 1388 }
1389 }
b7e55469 1390 }
1391 break;
1392
1393 case PLUS_EXPR:
1394 case POINTER_PLUS_EXPR:
1395 {
b7e55469 1396 /* Do the addition with unknown bits set to zero, to give carry-ins of
1397 zero wherever possible. */
ab2c1de8 1398 widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
796b6678 1399 lo = wi::ext (lo, width, sgn);
b7e55469 1400 /* Do the addition with unknown bits set to one, to give carry-ins of
1401 one wherever possible. */
ab2c1de8 1402 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
796b6678 1403 hi = wi::ext (hi, width, sgn);
b7e55469 1404 /* Each bit in the result is known if (a) the corresponding bits in
1405 both inputs are known, and (b) the carry-in to that bit position
1406 is known. We can check condition (b) by seeing if we got the same
1407 result with minimised carries as with maximised carries. */
cf8f0e63 1408 *mask = r1mask | r2mask | (lo ^ hi);
796b6678 1409 *mask = wi::ext (*mask, width, sgn);
b7e55469 1410 /* It shouldn't matter whether we choose lo or hi here. */
1411 *val = lo;
1412 break;
1413 }
1414
1415 case MINUS_EXPR:
1416 {
5de9d3ed 1417 widest_int temv, temm;
a54071b2 1418 bit_value_unop (NEGATE_EXPR, r2type_sgn, r2type_precision, &temv, &temm,
1419 r2type_sgn, r2type_precision, r2val, r2mask);
1420 bit_value_binop (PLUS_EXPR, sgn, width, val, mask,
1421 r1type_sgn, r1type_precision, r1val, r1mask,
1422 r2type_sgn, r2type_precision, temv, temm);
b7e55469 1423 break;
1424 }
1425
1426 case MULT_EXPR:
1427 {
1428 /* Just track trailing zeros in both operands and transfer
1429 them to the other. */
796b6678 1430 int r1tz = wi::ctz (r1val | r1mask);
1431 int r2tz = wi::ctz (r2val | r2mask);
e913b5cd 1432 if (r1tz + r2tz >= width)
b7e55469 1433 {
e913b5cd 1434 *mask = 0;
1435 *val = 0;
b7e55469 1436 }
1437 else if (r1tz + r2tz > 0)
1438 {
5de9d3ed 1439 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
796b6678 1440 width, sgn);
e913b5cd 1441 *val = 0;
b7e55469 1442 }
1443 break;
1444 }
1445
1446 case EQ_EXPR:
1447 case NE_EXPR:
1448 {
5de9d3ed 1449 widest_int m = r1mask | r2mask;
cf8f0e63 1450 if (r1val.and_not (m) != r2val.and_not (m))
b7e55469 1451 {
e913b5cd 1452 *mask = 0;
1453 *val = ((code == EQ_EXPR) ? 0 : 1);
b7e55469 1454 }
1455 else
1456 {
1457 /* We know the result of a comparison is always one or zero. */
e913b5cd 1458 *mask = 1;
1459 *val = 0;
b7e55469 1460 }
1461 break;
1462 }
1463
1464 case GE_EXPR:
1465 case GT_EXPR:
10c3fe8d 1466 swap_p = true;
1467 code = swap_tree_comparison (code);
1468 /* Fall through. */
b7e55469 1469 case LT_EXPR:
1470 case LE_EXPR:
1471 {
1472 int minmax, maxmin;
e913b5cd 1473
10c3fe8d 1474 const widest_int &o1val = swap_p ? r2val : r1val;
1475 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1476 const widest_int &o2val = swap_p ? r1val : r2val;
1477 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1478
b7e55469 1479 /* If the most significant bits are not known we know nothing. */
796b6678 1480 if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
b7e55469 1481 break;
1482
90c0f5b7 1483 /* For comparisons the signedness is in the comparison operands. */
a54071b2 1484 sgn = r1type_sgn;
90c0f5b7 1485
b7e55469 1486 /* If we know the most significant bits we know the value
1487 ranges by means of treating varying bits as zero
1488 or one. Do a cross comparison of the max/min pairs. */
796b6678 1489 maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
1490 minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
e913b5cd 1491 if (maxmin < 0) /* o1 is less than o2. */
b7e55469 1492 {
e913b5cd 1493 *mask = 0;
1494 *val = 1;
b7e55469 1495 }
e913b5cd 1496 else if (minmax > 0) /* o1 is not less or equal to o2. */
b7e55469 1497 {
e913b5cd 1498 *mask = 0;
1499 *val = 0;
b7e55469 1500 }
e913b5cd 1501 else if (maxmin == minmax) /* o1 and o2 are equal. */
b7e55469 1502 {
1503 /* This probably should never happen as we'd have
1504 folded the thing during fully constant value folding. */
e913b5cd 1505 *mask = 0;
1506 *val = (code == LE_EXPR ? 1 : 0);
b7e55469 1507 }
1508 else
1509 {
1510 /* We know the result of a comparison is always one or zero. */
e913b5cd 1511 *mask = 1;
1512 *val = 0;
b7e55469 1513 }
1514 break;
1515 }
1516
1517 default:;
1518 }
1519}
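/* Editorial worked example (not part of the original source) for the
   BIT_AND_EXPR case above: with r1val == 1100, r1mask == 0011 and
   r2val == 1010, r2mask == 0000, the result mask is
   (0011 | 0000) & (1100 | 0011) & (1010 | 0000) == 0010 and the result
   value is 1100 & 1010 == 1000.  Bit 0 is known to be zero because r2
   has a known zero there; bit 1 stays unknown because r1's unknown bit
   meets a known one.  */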
1520
1521/* Return the propagation value when applying the operation CODE to
1522 the value RHS yielding type TYPE. */
1523
9908fe4d 1524static ccp_prop_value_t
b7e55469 1525bit_value_unop (enum tree_code code, tree type, tree rhs)
1526{
9908fe4d 1527 ccp_prop_value_t rval = get_value_for_expr (rhs, true);
5de9d3ed 1528 widest_int value, mask;
9908fe4d 1529 ccp_prop_value_t val;
c91fedc5 1530
1531 if (rval.lattice_val == UNDEFINED)
1532 return rval;
1533
b7e55469 1534 gcc_assert ((rval.lattice_val == CONSTANT
1535 && TREE_CODE (rval.value) == INTEGER_CST)
c90d2c17 1536 || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
a54071b2 1537 bit_value_unop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1538 TYPE_SIGN (TREE_TYPE (rhs)), TYPE_PRECISION (TREE_TYPE (rhs)),
1539 value_to_wide_int (rval), rval.mask);
c90d2c17 1540 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
b7e55469 1541 {
1542 val.lattice_val = CONSTANT;
1543 val.mask = mask;
1544 /* ??? Delay building trees here. */
e913b5cd 1545 val.value = wide_int_to_tree (type, value);
b7e55469 1546 }
1547 else
1548 {
1549 val.lattice_val = VARYING;
1550 val.value = NULL_TREE;
e913b5cd 1551 val.mask = -1;
b7e55469 1552 }
1553 return val;
1554}
1555
1556/* Return the propagation value when applying the operation CODE to
1557 the values RHS1 and RHS2 yielding type TYPE. */
1558
9908fe4d 1559static ccp_prop_value_t
b7e55469 1560bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1561{
9908fe4d 1562 ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1563 ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
5de9d3ed 1564 widest_int value, mask;
9908fe4d 1565 ccp_prop_value_t val;
c91fedc5 1566
1567 if (r1val.lattice_val == UNDEFINED
1568 || r2val.lattice_val == UNDEFINED)
1569 {
1570 val.lattice_val = VARYING;
1571 val.value = NULL_TREE;
e913b5cd 1572 val.mask = -1;
c91fedc5 1573 return val;
1574 }
1575
b7e55469 1576 gcc_assert ((r1val.lattice_val == CONSTANT
1577 && TREE_CODE (r1val.value) == INTEGER_CST)
c90d2c17 1578 || wi::sext (r1val.mask,
1579 TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
b7e55469 1580 gcc_assert ((r2val.lattice_val == CONSTANT
1581 && TREE_CODE (r2val.value) == INTEGER_CST)
c90d2c17 1582 || wi::sext (r2val.mask,
1583 TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
a54071b2 1584 bit_value_binop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1585 TYPE_SIGN (TREE_TYPE (rhs1)), TYPE_PRECISION (TREE_TYPE (rhs1)),
1586 value_to_wide_int (r1val), r1val.mask,
1587 TYPE_SIGN (TREE_TYPE (rhs2)), TYPE_PRECISION (TREE_TYPE (rhs2)),
1588 value_to_wide_int (r2val), r2val.mask);
1589
c90d2c17 1590 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
b7e55469 1591 {
1592 val.lattice_val = CONSTANT;
1593 val.mask = mask;
1594 /* ??? Delay building trees here. */
e913b5cd 1595 val.value = wide_int_to_tree (type, value);
b7e55469 1596 }
1597 else
1598 {
1599 val.lattice_val = VARYING;
1600 val.value = NULL_TREE;
e913b5cd 1601 val.mask = -1;
b7e55469 1602 }
1603 return val;
1604}
1605
237e78b1 1606/* Return the propagation value for __builtin_assume_aligned
1607 and functions with assume_aligned or alloc_align attribute.
1608 For __builtin_assume_aligned, ATTR is NULL_TREE,
1609 for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1610 is false, for alloc_align attribute ATTR is non-NULL and
1611 ALLOC_ALIGNED is true. */
fca0886c 1612
9908fe4d 1613static ccp_prop_value_t
42acab1c 1614bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
237e78b1 1615 bool alloc_aligned)
fca0886c 1616{
237e78b1 1617 tree align, misalign = NULL_TREE, type;
fca0886c 1618 unsigned HOST_WIDE_INT aligni, misaligni = 0;
9908fe4d 1619 ccp_prop_value_t alignval;
5de9d3ed 1620 widest_int value, mask;
9908fe4d 1621 ccp_prop_value_t val;
e913b5cd 1622
237e78b1 1623 if (attr == NULL_TREE)
1624 {
1625 tree ptr = gimple_call_arg (stmt, 0);
1626 type = TREE_TYPE (ptr);
1627 ptrval = get_value_for_expr (ptr, true);
1628 }
1629 else
1630 {
1631 tree lhs = gimple_call_lhs (stmt);
1632 type = TREE_TYPE (lhs);
1633 }
1634
fca0886c 1635 if (ptrval.lattice_val == UNDEFINED)
1636 return ptrval;
1637 gcc_assert ((ptrval.lattice_val == CONSTANT
1638 && TREE_CODE (ptrval.value) == INTEGER_CST)
c90d2c17 1639 || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
237e78b1 1640 if (attr == NULL_TREE)
fca0886c 1641 {
237e78b1 1642 /* Get aligni and misaligni from __builtin_assume_aligned. */
1643 align = gimple_call_arg (stmt, 1);
1644 if (!tree_fits_uhwi_p (align))
fca0886c 1645 return ptrval;
237e78b1 1646 aligni = tree_to_uhwi (align);
1647 if (gimple_call_num_args (stmt) > 2)
1648 {
1649 misalign = gimple_call_arg (stmt, 2);
1650 if (!tree_fits_uhwi_p (misalign))
1651 return ptrval;
1652 misaligni = tree_to_uhwi (misalign);
1653 }
1654 }
1655 else
fca0886c 1656 {
237e78b1 1657 /* Get aligni and misaligni from assume_aligned or
1658 alloc_align attributes. */
1659 if (TREE_VALUE (attr) == NULL_TREE)
fca0886c 1660 return ptrval;
237e78b1 1661 attr = TREE_VALUE (attr);
1662 align = TREE_VALUE (attr);
1663 if (!tree_fits_uhwi_p (align))
fca0886c 1664 return ptrval;
237e78b1 1665 aligni = tree_to_uhwi (align);
1666 if (alloc_aligned)
1667 {
1668 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1669 return ptrval;
1670 align = gimple_call_arg (stmt, aligni - 1);
1671 if (!tree_fits_uhwi_p (align))
1672 return ptrval;
1673 aligni = tree_to_uhwi (align);
1674 }
1675 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1676 {
1677 misalign = TREE_VALUE (TREE_CHAIN (attr));
1678 if (!tree_fits_uhwi_p (misalign))
1679 return ptrval;
1680 misaligni = tree_to_uhwi (misalign);
1681 }
fca0886c 1682 }
237e78b1 1683 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1684 return ptrval;
1685
fca0886c 1686 align = build_int_cst_type (type, -aligni);
1687 alignval = get_value_for_expr (align, true);
a54071b2 1688 bit_value_binop (BIT_AND_EXPR, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1689 TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (ptrval), ptrval.mask,
1690 TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (alignval), alignval.mask);
1691
c90d2c17 1692 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
fca0886c 1693 {
1694 val.lattice_val = CONSTANT;
1695 val.mask = mask;
e913b5cd 1696 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1697 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1698 value |= misaligni;
fca0886c 1699 /* ??? Delay building trees here. */
e913b5cd 1700 val.value = wide_int_to_tree (type, value);
fca0886c 1701 }
1702 else
1703 {
1704 val.lattice_val = VARYING;
1705 val.value = NULL_TREE;
e913b5cd 1706 val.mask = -1;
fca0886c 1707 }
1708 return val;
1709}
1710
75a70cf9 1711/* Evaluate statement STMT.
1712 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1713
9908fe4d 1714static ccp_prop_value_t
42acab1c 1715evaluate_stmt (gimple *stmt)
4ee9c684 1716{
9908fe4d 1717 ccp_prop_value_t val;
4f61cce6 1718 tree simplified = NULL_TREE;
88dbf20f 1719 ccp_lattice_t likelyvalue = likely_value (stmt);
b7e55469 1720 bool is_constant = false;
581bf1c2 1721 unsigned int align;
88dbf20f 1722
b7e55469 1723 if (dump_file && (dump_flags & TDF_DETAILS))
1724 {
1725 fprintf (dump_file, "which is likely ");
1726 switch (likelyvalue)
1727 {
1728 case CONSTANT:
1729 fprintf (dump_file, "CONSTANT");
1730 break;
1731 case UNDEFINED:
1732 fprintf (dump_file, "UNDEFINED");
1733 break;
1734 case VARYING:
1735 fprintf (dump_file, "VARYING");
1736 break;
1737 default:;
1738 }
1739 fprintf (dump_file, "\n");
1740 }
add6ee5e 1741
4ee9c684 1742 /* If the statement is likely to have a CONSTANT result, then try
1743 to fold the statement to determine the constant value. */
75a70cf9 1744 /* FIXME. This is the only place that we call ccp_fold.
1745 Since likely_value never returns CONSTANT for calls, we will
 1746     not attempt to fold them, including builtins that might profit from it. */
4ee9c684 1747 if (likelyvalue == CONSTANT)
b7e55469 1748 {
1749 fold_defer_overflow_warnings ();
1750 simplified = ccp_fold (stmt);
c57dab92 1751 if (simplified
3ec56105 1752 && TREE_CODE (simplified) == SSA_NAME)
1753 {
c57dab92 1754 /* We may not use values of something that may be simulated again,
1755 see valueize_op_1. */
3ec56105 1756 if (SSA_NAME_IS_DEFAULT_DEF (simplified)
1757 || ! prop_simulate_again_p (SSA_NAME_DEF_STMT (simplified)))
27de3d43 1758 {
3ec56105 1759 ccp_prop_value_t *val = get_value (simplified);
1760 if (val && val->lattice_val != VARYING)
1761 {
1762 fold_undefer_overflow_warnings (true, stmt, 0);
1763 return *val;
1764 }
27de3d43 1765 }
3ec56105 1766 else
1767 /* We may also not place a non-valueized copy in the lattice
1768 as that might become stale if we never re-visit this stmt. */
1769 simplified = NULL_TREE;
27de3d43 1770 }
b7e55469 1771 is_constant = simplified && is_gimple_min_invariant (simplified);
1772 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1773 if (is_constant)
1774 {
1775 /* The statement produced a constant value. */
1776 val.lattice_val = CONSTANT;
1777 val.value = simplified;
e913b5cd 1778 val.mask = 0;
27de3d43 1779 return val;
b7e55469 1780 }
1781 }
4ee9c684 1782 /* If the statement is likely to have a VARYING result, then do not
1783 bother folding the statement. */
04236c3a 1784 else if (likelyvalue == VARYING)
75a70cf9 1785 {
590c3166 1786 enum gimple_code code = gimple_code (stmt);
75a70cf9 1787 if (code == GIMPLE_ASSIGN)
1788 {
1789 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 1790
75a70cf9 1791 /* Other cases cannot satisfy is_gimple_min_invariant
1792 without folding. */
1793 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1794 simplified = gimple_assign_rhs1 (stmt);
1795 }
1796 else if (code == GIMPLE_SWITCH)
1a91d914 1797 simplified = gimple_switch_index (as_a <gswitch *> (stmt));
75a70cf9 1798 else
a65c4d64 1799 /* These cannot satisfy is_gimple_min_invariant without folding. */
1800 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
b7e55469 1801 is_constant = simplified && is_gimple_min_invariant (simplified);
1802 if (is_constant)
1803 {
1804 /* The statement produced a constant value. */
1805 val.lattice_val = CONSTANT;
1806 val.value = simplified;
e913b5cd 1807 val.mask = 0;
b7e55469 1808 }
75a70cf9 1809 }
4c9206f2 1810 /* If the statement result is likely UNDEFINED, make it so. */
1811 else if (likelyvalue == UNDEFINED)
1812 {
1813 val.lattice_val = UNDEFINED;
1814 val.value = NULL_TREE;
1815 val.mask = 0;
1816 return val;
1817 }
4ee9c684 1818
b7e55469 1819 /* Resort to simplification for bitwise tracking. */
1820 if (flag_tree_bit_ccp
db48deb0 1821 && (likelyvalue == CONSTANT || is_gimple_call (stmt)
1822 || (gimple_assign_single_p (stmt)
1823 && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
b7e55469 1824 && !is_constant)
912f109f 1825 {
b7e55469 1826 enum gimple_code code = gimple_code (stmt);
1827 val.lattice_val = VARYING;
1828 val.value = NULL_TREE;
e913b5cd 1829 val.mask = -1;
b7e55469 1830 if (code == GIMPLE_ASSIGN)
912f109f 1831 {
b7e55469 1832 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1833 tree rhs1 = gimple_assign_rhs1 (stmt);
c0a4b664 1834 tree lhs = gimple_assign_lhs (stmt);
1835 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1836 || POINTER_TYPE_P (TREE_TYPE (lhs)))
1837 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1838 || POINTER_TYPE_P (TREE_TYPE (rhs1))))
1839 switch (get_gimple_rhs_class (subcode))
1840 {
1841 case GIMPLE_SINGLE_RHS:
1842 val = get_value_for_expr (rhs1, true);
1843 break;
1844
1845 case GIMPLE_UNARY_RHS:
1846 val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
1847 break;
1848
1849 case GIMPLE_BINARY_RHS:
1850 val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
1851 gimple_assign_rhs2 (stmt));
1852 break;
1853
1854 default:;
1855 }
912f109f 1856 }
b7e55469 1857 else if (code == GIMPLE_COND)
1858 {
1859 enum tree_code code = gimple_cond_code (stmt);
1860 tree rhs1 = gimple_cond_lhs (stmt);
1861 tree rhs2 = gimple_cond_rhs (stmt);
1862 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1863 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1864 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1865 }
0b4f0116 1866 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
153c3b50 1867 {
0b4f0116 1868 tree fndecl = gimple_call_fndecl (stmt);
153c3b50 1869 switch (DECL_FUNCTION_CODE (fndecl))
1870 {
1871 case BUILT_IN_MALLOC:
1872 case BUILT_IN_REALLOC:
1873 case BUILT_IN_CALLOC:
939514e9 1874 case BUILT_IN_STRDUP:
1875 case BUILT_IN_STRNDUP:
153c3b50 1876 val.lattice_val = CONSTANT;
1877 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1878 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1879 / BITS_PER_UNIT - 1);
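	    /* For illustration: with a (hypothetical) 128-bit
	       MALLOC_ABI_ALIGNMENT this is ~15, i.e. the low four bits
	       of the returned pointer are known to be zero.  */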
153c3b50 1880 break;
1881
1882 case BUILT_IN_ALLOCA:
581bf1c2 1883 case BUILT_IN_ALLOCA_WITH_ALIGN:
1884 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
f9ae6f95 1885 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
581bf1c2 1886 : BIGGEST_ALIGNMENT);
153c3b50 1887 val.lattice_val = CONSTANT;
1888 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1889 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
153c3b50 1890 break;
1891
939514e9 1892 /* These builtins return their first argument, unmodified. */
1893 case BUILT_IN_MEMCPY:
1894 case BUILT_IN_MEMMOVE:
1895 case BUILT_IN_MEMSET:
1896 case BUILT_IN_STRCPY:
1897 case BUILT_IN_STRNCPY:
1898 case BUILT_IN_MEMCPY_CHK:
1899 case BUILT_IN_MEMMOVE_CHK:
1900 case BUILT_IN_MEMSET_CHK:
1901 case BUILT_IN_STRCPY_CHK:
1902 case BUILT_IN_STRNCPY_CHK:
1903 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1904 break;
1905
fca0886c 1906 case BUILT_IN_ASSUME_ALIGNED:
237e78b1 1907 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
fca0886c 1908 break;
1909
060fc206 1910 case BUILT_IN_ALIGNED_ALLOC:
1911 {
1912 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1913 if (align
1914 && tree_fits_uhwi_p (align))
1915 {
1916 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1917 if (aligni > 1
1918 /* align must be power-of-two */
1919 && (aligni & (aligni - 1)) == 0)
1920 {
1921 val.lattice_val = CONSTANT;
1922 val.value = build_int_cst (ptr_type_node, 0);
cd89b631 1923 val.mask = -aligni;
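		    /* Illustration: for aligned_alloc (64, n) this records
		       value = 0, mask = -64, so the low six bits of the
		       result are known to be zero.  */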
060fc206 1924 }
1925 }
1926 break;
1927 }
1928
153c3b50 1929 default:;
1930 }
1931 }
237e78b1 1932 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1933 {
1934 tree fntype = gimple_call_fntype (stmt);
1935 if (fntype)
1936 {
1937 tree attrs = lookup_attribute ("assume_aligned",
1938 TYPE_ATTRIBUTES (fntype));
1939 if (attrs)
1940 val = bit_value_assume_aligned (stmt, attrs, val, false);
1941 attrs = lookup_attribute ("alloc_align",
1942 TYPE_ATTRIBUTES (fntype));
1943 if (attrs)
1944 val = bit_value_assume_aligned (stmt, attrs, val, true);
1945 }
1946 }
b7e55469 1947 is_constant = (val.lattice_val == CONSTANT);
912f109f 1948 }
1949
fc08b993 1950 if (flag_tree_bit_ccp
1951 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
4c9206f2 1952 || !is_constant)
fc08b993 1953 && gimple_get_lhs (stmt)
1954 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1955 {
1956 tree lhs = gimple_get_lhs (stmt);
9c1be15e 1957 wide_int nonzero_bits = get_nonzero_bits (lhs);
1958 if (nonzero_bits != -1)
fc08b993 1959 {
1960 if (!is_constant)
1961 {
1962 val.lattice_val = CONSTANT;
1963 val.value = build_zero_cst (TREE_TYPE (lhs));
9a93e2f7 1964 val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (lhs)));
fc08b993 1965 is_constant = true;
1966 }
1967 else
1968 {
9c1be15e 1969 if (wi::bit_and_not (val.value, nonzero_bits) != 0)
aeb682a2 1970 val.value = wide_int_to_tree (TREE_TYPE (lhs),
9c1be15e 1971 nonzero_bits & val.value);
aeb682a2 1972 if (nonzero_bits == 0)
1973 val.mask = 0;
fc08b993 1974 else
9a93e2f7 1975 val.mask = val.mask & extend_mask (nonzero_bits,
1976 TYPE_SIGN (TREE_TYPE (lhs)));
fc08b993 1977 }
1978 }
1979 }
1980
4c9206f2 1981 /* The statement produced a nonconstant value. */
b7e55469 1982 if (!is_constant)
4ee9c684 1983 {
fc6cc27b 1984 /* The statement produced a copy. */
1985 if (simplified && TREE_CODE (simplified) == SSA_NAME
1986 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
1987 {
1988 val.lattice_val = CONSTANT;
1989 val.value = simplified;
1990 val.mask = -1;
1991 }
1992 /* The statement is VARYING. */
1993 else
1994 {
1995 val.lattice_val = VARYING;
1996 val.value = NULL_TREE;
1997 val.mask = -1;
1998 }
4ee9c684 1999 }
41511585 2000
2001 return val;
4ee9c684 2002}
2003
42acab1c 2004typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
2b15d2ba 2005
582a80ed 2006/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
2007 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
2008
2009static void
2b15d2ba 2010insert_clobber_before_stack_restore (tree saved_val, tree var,
c1f445d2 2011 gimple_htab **visited)
582a80ed 2012{
42acab1c 2013 gimple *stmt;
1a91d914 2014 gassign *clobber_stmt;
582a80ed 2015 tree clobber;
2016 imm_use_iterator iter;
2017 gimple_stmt_iterator i;
42acab1c 2018 gimple **slot;
582a80ed 2019
2020 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
2021 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
2022 {
f1f41a6c 2023 clobber = build_constructor (TREE_TYPE (var),
2024 NULL);
582a80ed 2025 TREE_THIS_VOLATILE (clobber) = 1;
2026 clobber_stmt = gimple_build_assign (var, clobber);
2027
2028 i = gsi_for_stmt (stmt);
2029 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
2030 }
2031 else if (gimple_code (stmt) == GIMPLE_PHI)
2032 {
c1f445d2 2033 if (!*visited)
2034 *visited = new gimple_htab (10);
582a80ed 2035
c1f445d2 2036 slot = (*visited)->find_slot (stmt, INSERT);
582a80ed 2037 if (*slot != NULL)
2038 continue;
2039
2040 *slot = stmt;
2041 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
2042 visited);
2043 }
42eed683 2044 else if (gimple_assign_ssa_name_copy_p (stmt))
2045 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
2046 visited);
058a1b7a 2047 else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
2048 continue;
582a80ed 2049 else
2050 gcc_assert (is_gimple_debug (stmt));
2051}
2052
2053/* Advance the iterator to the previous non-debug gimple statement in the same
2054 or dominating basic block. */
2055
2056static inline void
2057gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
2058{
2059 basic_block dom;
2060
2061 gsi_prev_nondebug (i);
2062 while (gsi_end_p (*i))
2063 {
2064 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
34154e27 2065 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
582a80ed 2066 return;
2067
2068 *i = gsi_last_bb (dom);
2069 }
2070}
2071
2072/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1543f720 2073 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2074
 2075   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
2076 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
2077 that case the function gives up without inserting the clobbers. */
582a80ed 2078
2079static void
2080insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2081{
42acab1c 2082 gimple *stmt;
582a80ed 2083 tree saved_val;
c1f445d2 2084 gimple_htab *visited = NULL;
582a80ed 2085
1543f720 2086 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
582a80ed 2087 {
2088 stmt = gsi_stmt (i);
2089
2090 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2091 continue;
582a80ed 2092
2093 saved_val = gimple_call_lhs (stmt);
2094 if (saved_val == NULL_TREE)
2095 continue;
2096
2097 insert_clobber_before_stack_restore (saved_val, var, &visited);
2098 break;
2099 }
2100
c1f445d2 2101 delete visited;
582a80ed 2102}
2103
581bf1c2 2104/* Detects a __builtin_alloca_with_align with a constant size argument.  If
 2105   one is found, declares a fixed-size array instead and returns its address;
 2106   otherwise returns NULL_TREE. */
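
/* An illustrative (hypothetical) example: a call

     p_1 = __builtin_alloca_with_align (16, 64);

   where 64 is the requested alignment in bits, can be replaced by the
   address of a 16-byte local array with 8-byte alignment, provided the
   size passes the threshold check below.  */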
9a65cc0a 2107
2108static tree
42acab1c 2109fold_builtin_alloca_with_align (gimple *stmt)
9a65cc0a 2110{
2111 unsigned HOST_WIDE_INT size, threshold, n_elem;
2112 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 2113
2114 /* Get lhs. */
2115 lhs = gimple_call_lhs (stmt);
2116 if (lhs == NULL_TREE)
2117 return NULL_TREE;
2118
2119 /* Detect constant argument. */
2120 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 2121 if (arg == NULL_TREE
2122 || TREE_CODE (arg) != INTEGER_CST
e913b5cd 2123 || !tree_fits_uhwi_p (arg))
9a65cc0a 2124 return NULL_TREE;
6e93d308 2125
8c53c46c 2126 size = tree_to_uhwi (arg);
9a65cc0a 2127
581bf1c2 2128 /* Heuristic: don't fold large allocas. */
9a65cc0a 2129 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 2130 /* In case the alloca is located at function entry, it has the same lifetime
2131 as a declared array, so we allow a larger size. */
9a65cc0a 2132 block = gimple_block (stmt);
2133 if (!(cfun->after_inlining
14df71e4 2134 && block
9a65cc0a 2135 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2136 threshold /= 10;
2137 if (size > threshold)
2138 return NULL_TREE;
2139
2140 /* Declare array. */
2141 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2142 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 2143 array_type = build_array_type_nelts (elem_type, n_elem);
f9e245b2 2144 var = create_tmp_var (array_type);
5d4b30ea 2145 SET_DECL_ALIGN (var, TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
3d4a0a4b 2146 {
2147 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2148 if (pi != NULL && !pi->pt.anything)
2149 {
2150 bool singleton_p;
2151 unsigned uid;
5b48910f 2152 singleton_p = pt_solution_singleton_or_null_p (&pi->pt, &uid);
3d4a0a4b 2153 gcc_assert (singleton_p);
2154 SET_DECL_PT_UID (var, uid);
2155 }
2156 }
9a65cc0a 2157
2158 /* Fold alloca to the address of the array. */
2159 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2160}
2161
6688f8ec 2162/* Fold the stmt at *GSI with CCP specific information that propagating
2163 and regular folding does not catch. */
2164
2165static bool
2166ccp_fold_stmt (gimple_stmt_iterator *gsi)
2167{
42acab1c 2168 gimple *stmt = gsi_stmt (*gsi);
6688f8ec 2169
94144e68 2170 switch (gimple_code (stmt))
2171 {
2172 case GIMPLE_COND:
2173 {
1a91d914 2174 gcond *cond_stmt = as_a <gcond *> (stmt);
9908fe4d 2175 ccp_prop_value_t val;
94144e68 2176 /* Statement evaluation will handle type mismatches in constants
2177 more gracefully than the final propagation. This allows us to
2178 fold more conditionals here. */
2179 val = evaluate_stmt (stmt);
2180 if (val.lattice_val != CONSTANT
796b6678 2181 || val.mask != 0)
94144e68 2182 return false;
2183
b7e55469 2184 if (dump_file)
2185 {
2186 fprintf (dump_file, "Folding predicate ");
2187 print_gimple_expr (dump_file, stmt, 0, 0);
2188 fprintf (dump_file, " to ");
2189 print_generic_expr (dump_file, val.value, 0);
2190 fprintf (dump_file, "\n");
2191 }
2192
94144e68 2193 if (integer_zerop (val.value))
1a91d914 2194 gimple_cond_make_false (cond_stmt);
94144e68 2195 else
1a91d914 2196 gimple_cond_make_true (cond_stmt);
6688f8ec 2197
94144e68 2198 return true;
2199 }
6688f8ec 2200
94144e68 2201 case GIMPLE_CALL:
2202 {
2203 tree lhs = gimple_call_lhs (stmt);
3064bb7b 2204 int flags = gimple_call_flags (stmt);
15d138c9 2205 tree val;
94144e68 2206 tree argt;
2207 bool changed = false;
2208 unsigned i;
2209
 2210	  /* If the call was folded into a constant, make sure it goes
2211 away even if we cannot propagate into all uses because of
2212 type issues. */
2213 if (lhs
2214 && TREE_CODE (lhs) == SSA_NAME
3064bb7b 2215 && (val = get_constant_value (lhs))
2216 /* Don't optimize away calls that have side-effects. */
2217 && (flags & (ECF_CONST|ECF_PURE)) != 0
2218 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
94144e68 2219 {
15d138c9 2220 tree new_rhs = unshare_expr (val);
338cce8f 2221 bool res;
94144e68 2222 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2223 TREE_TYPE (new_rhs)))
2224 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 2225 res = update_call_from_tree (gsi, new_rhs);
2226 gcc_assert (res);
94144e68 2227 return true;
2228 }
2229
fb049fba 2230 /* Internal calls provide no argument types, so the extra laxity
2231 for normal calls does not apply. */
2232 if (gimple_call_internal_p (stmt))
2233 return false;
2234
581bf1c2 2235 /* The heuristic of fold_builtin_alloca_with_align differs before and
2236 after inlining, so we don't require the arg to be changed into a
2237 constant for folding, but just to be constant. */
2238 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 2239 {
581bf1c2 2240 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 2241 if (new_rhs)
2242 {
2243 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 2244 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 2245 gcc_assert (res);
582a80ed 2246 insert_clobbers_for_var (*gsi, var);
6e93d308 2247 return true;
2248 }
9a65cc0a 2249 }
2250
94144e68 2251 /* Propagate into the call arguments. Compared to replace_uses_in
2252 this can use the argument slot types for type verification
2253 instead of the current argument type. We also can safely
2254 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 2255 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 2256 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2257 ++i, argt = TREE_CHAIN (argt))
2258 {
2259 tree arg = gimple_call_arg (stmt, i);
2260 if (TREE_CODE (arg) == SSA_NAME
15d138c9 2261 && (val = get_constant_value (arg))
94144e68 2262 && useless_type_conversion_p
2263 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 2264 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 2265 {
15d138c9 2266 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 2267 changed = true;
2268 }
2269 }
e16f4c39 2270
94144e68 2271 return changed;
2272 }
6688f8ec 2273
6872bf3c 2274 case GIMPLE_ASSIGN:
2275 {
2276 tree lhs = gimple_assign_lhs (stmt);
15d138c9 2277 tree val;
6872bf3c 2278
 2279	  /* If we have a load that turned out to be constant, replace it
2280 as we cannot propagate into all uses in all cases. */
2281 if (gimple_assign_single_p (stmt)
2282 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2283 && (val = get_constant_value (lhs)))
6872bf3c 2284 {
15d138c9 2285 tree rhs = unshare_expr (val);
6872bf3c 2286 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 2287 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 2288 gimple_assign_set_rhs_from_tree (gsi, rhs);
2289 return true;
2290 }
2291
2292 return false;
2293 }
2294
94144e68 2295 default:
2296 return false;
2297 }
6688f8ec 2298}
2299
41511585 2300/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 2301 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2302 creates virtual definitions, set the value of each new name to that
75a70cf9 2303 of the RHS (if we can derive a constant out of the RHS).
2304 Value-returning call statements also perform an assignment, and
2305 are handled here. */
4ee9c684 2306
41511585 2307static enum ssa_prop_result
42acab1c 2308visit_assignment (gimple *stmt, tree *output_p)
4ee9c684 2309{
9908fe4d 2310 ccp_prop_value_t val;
fc6cc27b 2311 enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2312
75a70cf9 2313 tree lhs = gimple_get_lhs (stmt);
88dbf20f 2314 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2315 {
fc6cc27b 2316 /* Evaluate the statement, which could be
2317 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2318 val = evaluate_stmt (stmt);
2319
88dbf20f 2320 /* If STMT is an assignment to an SSA_NAME, we only have one
2321 value to set. */
27de3d43 2322 if (set_lattice_value (lhs, &val))
88dbf20f 2323 {
2324 *output_p = lhs;
2325 if (val.lattice_val == VARYING)
2326 retval = SSA_PROP_VARYING;
2327 else
2328 retval = SSA_PROP_INTERESTING;
2329 }
4ee9c684 2330 }
88dbf20f 2331
2332 return retval;
4ee9c684 2333}
2334
4ee9c684 2335
41511585 2336/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2337 if it can determine which edge will be taken. Otherwise, return
2338 SSA_PROP_VARYING. */
2339
2340static enum ssa_prop_result
42acab1c 2341visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
4ee9c684 2342{
9908fe4d 2343 ccp_prop_value_t val;
41511585 2344 basic_block block;
2345
75a70cf9 2346 block = gimple_bb (stmt);
41511585 2347 val = evaluate_stmt (stmt);
b7e55469 2348 if (val.lattice_val != CONSTANT
796b6678 2349 || val.mask != 0)
b7e55469 2350 return SSA_PROP_VARYING;
41511585 2351
2352 /* Find which edge out of the conditional block will be taken and add it
2353 to the worklist. If no single edge can be determined statically,
2354 return SSA_PROP_VARYING to feed all the outgoing edges to the
2355 propagation engine. */
b7e55469 2356 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2357 if (*taken_edge_p)
2358 return SSA_PROP_INTERESTING;
2359 else
2360 return SSA_PROP_VARYING;
4ee9c684 2361}
2362
4ee9c684 2363
41511585 2364/* Evaluate statement STMT. If the statement produces an output value and
2365 its evaluation changes the lattice value of its output, return
2366 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2367 output value.
48e1416a 2368
41511585 2369 If STMT is a conditional branch and we can determine its truth
2370 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2371 value, return SSA_PROP_VARYING. */
4ee9c684 2372
41511585 2373static enum ssa_prop_result
42acab1c 2374ccp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
41511585 2375{
41511585 2376 tree def;
2377 ssa_op_iter iter;
4ee9c684 2378
41511585 2379 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2380 {
88dbf20f 2381 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2382 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2383 }
4ee9c684 2384
75a70cf9 2385 switch (gimple_code (stmt))
4ee9c684 2386 {
75a70cf9 2387 case GIMPLE_ASSIGN:
2388 /* If the statement is an assignment that produces a single
2389 output value, evaluate its RHS to see if the lattice value of
2390 its output has changed. */
2391 return visit_assignment (stmt, output_p);
2392
2393 case GIMPLE_CALL:
2394 /* A value-returning call also performs an assignment. */
2395 if (gimple_call_lhs (stmt) != NULL_TREE)
2396 return visit_assignment (stmt, output_p);
2397 break;
2398
2399 case GIMPLE_COND:
2400 case GIMPLE_SWITCH:
2401 /* If STMT is a conditional branch, see if we can determine
2402 which branch will be taken. */
2403 /* FIXME. It appears that we should be able to optimize
2404 computed GOTOs here as well. */
2405 return visit_cond_stmt (stmt, taken_edge_p);
2406
2407 default:
2408 break;
4ee9c684 2409 }
4ee9c684 2410
41511585 2411 /* Any other kind of statement is not interesting for constant
2412 propagation and, therefore, not worth simulating. */
41511585 2413 if (dump_file && (dump_flags & TDF_DETAILS))
2414 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2415
41511585 2416 /* Definitions made by statements other than assignments to
2417 SSA_NAMEs represent unknown modifications to their outputs.
2418 Mark them VARYING. */
88dbf20f 2419 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
27de3d43 2420 set_value_varying (def);
4ee9c684 2421
41511585 2422 return SSA_PROP_VARYING;
2423}
4ee9c684 2424
4ee9c684 2425
d0322b7d 2426/* Main entry point for SSA Conditional Constant Propagation. If NONZERO_P,
2427 record nonzero bits. */
41511585 2428
33a34f1e 2429static unsigned int
d0322b7d 2430do_ssa_ccp (bool nonzero_p)
41511585 2431{
582a80ed 2432 unsigned int todo = 0;
2433 calculate_dominance_info (CDI_DOMINATORS);
e2588447 2434
41511585 2435 ccp_initialize ();
2436 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
a54071b2 2437 if (ccp_finalize (nonzero_p || flag_ipa_bit_cp))
e2588447 2438 {
2439 todo = (TODO_cleanup_cfg | TODO_update_ssa);
2440
2441 /* ccp_finalize does not preserve loop-closed ssa. */
2442 loops_state_clear (LOOP_CLOSED_SSA);
2443 }
2444
582a80ed 2445 free_dominance_info (CDI_DOMINATORS);
2446 return todo;
4ee9c684 2447}
2448
5664499b 2449
cbe8bda8 2450namespace {
2451
2452const pass_data pass_data_ccp =
41511585 2453{
cbe8bda8 2454 GIMPLE_PASS, /* type */
2455 "ccp", /* name */
2456 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2457 TV_TREE_CCP, /* tv_id */
2458 ( PROP_cfg | PROP_ssa ), /* properties_required */
2459 0, /* properties_provided */
2460 0, /* properties_destroyed */
2461 0, /* todo_flags_start */
8b88439e 2462 TODO_update_address_taken, /* todo_flags_finish */
41511585 2463};
4ee9c684 2464
cbe8bda8 2465class pass_ccp : public gimple_opt_pass
2466{
2467public:
9af5ce0c 2468 pass_ccp (gcc::context *ctxt)
d0322b7d 2469 : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false)
cbe8bda8 2470 {}
2471
2472 /* opt_pass methods: */
ae84f584 2473 opt_pass * clone () { return new pass_ccp (m_ctxt); }
d0322b7d 2474 void set_pass_param (unsigned int n, bool param)
2475 {
2476 gcc_assert (n == 0);
2477 nonzero_p = param;
2478 }
31315c24 2479 virtual bool gate (function *) { return flag_tree_ccp != 0; }
d0322b7d 2480 virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); }
cbe8bda8 2481
d0322b7d 2482 private:
2483 /* Determines whether the pass instance records nonzero bits. */
2484 bool nonzero_p;
cbe8bda8 2485}; // class pass_ccp
2486
2487} // anon namespace
2488
2489gimple_opt_pass *
2490make_pass_ccp (gcc::context *ctxt)
2491{
2492 return new pass_ccp (ctxt);
2493}
2494
4ee9c684 2495
75a70cf9 2496
bdd0e199 2497/* Try to optimize out __builtin_stack_restore. Optimize it out
2498 if there is another __builtin_stack_restore in the same basic
2499 block and no calls or ASM_EXPRs are in between, or if this block's
2500 only outgoing edge is to EXIT_BLOCK and there are no calls or
2501 ASM_EXPRs after this __builtin_stack_restore. */
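
/* For illustration: in a block that ends the function, such as

     __builtin_stack_restore (saved_1);
     return;

   the restore is redundant because the function epilogue restores the
   stack anyway, so (assuming no intervening calls or asms) the call can
   be deleted.  */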
2502
2503static tree
75a70cf9 2504optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2505{
6ea999da 2506 tree callee;
42acab1c 2507 gimple *stmt;
75a70cf9 2508
2509 basic_block bb = gsi_bb (i);
42acab1c 2510 gimple *call = gsi_stmt (i);
bdd0e199 2511
75a70cf9 2512 if (gimple_code (call) != GIMPLE_CALL
2513 || gimple_call_num_args (call) != 1
2514 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2515 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2516 return NULL_TREE;
2517
75a70cf9 2518 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2519 {
75a70cf9 2520 stmt = gsi_stmt (i);
2521 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2522 return NULL_TREE;
75a70cf9 2523 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2524 continue;
2525
75a70cf9 2526 callee = gimple_call_fndecl (stmt);
c40a6f90 2527 if (!callee
2528 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2529 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2530 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2531 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2532 return NULL_TREE;
2533
2534 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2535 goto second_stack_restore;
bdd0e199 2536 }
2537
6ea999da 2538 if (!gsi_end_p (i))
bdd0e199 2539 return NULL_TREE;
2540
6ea999da 2541  /* Allow a single successor that is the exit block, or zero successors. */
2542 switch (EDGE_COUNT (bb->succs))
2543 {
2544 case 0:
2545 break;
2546 case 1:
34154e27 2547 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
6ea999da 2548 return NULL_TREE;
2549 break;
2550 default:
2551 return NULL_TREE;
2552 }
2553 second_stack_restore:
bdd0e199 2554
6ea999da 2555 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2556 If there are multiple uses, then the last one should remove the call.
2557 In any case, whether the call to __builtin_stack_save can be removed
2558 or not is irrelevant to removing the call to __builtin_stack_restore. */
2559 if (has_single_use (gimple_call_arg (call, 0)))
2560 {
42acab1c 2561 gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
6ea999da 2562 if (is_gimple_call (stack_save))
2563 {
2564 callee = gimple_call_fndecl (stack_save);
2565 if (callee
2566 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2567 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2568 {
2569 gimple_stmt_iterator stack_save_gsi;
2570 tree rhs;
bdd0e199 2571
6ea999da 2572 stack_save_gsi = gsi_for_stmt (stack_save);
2573 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2574 update_call_from_tree (&stack_save_gsi, rhs);
2575 }
2576 }
2577 }
bdd0e199 2578
75a70cf9 2579 /* No effect, so the statement will be deleted. */
bdd0e199 2580 return integer_zero_node;
2581}
75a70cf9 2582
8a58ed0a 2583/* If va_list type is a simple pointer and nothing special is needed,
2584 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 2585   __builtin_va_end (&ap) away as a no-op, and __builtin_va_copy into a simple
2586 pointer assignment. */
2587
2588static tree
42acab1c 2589optimize_stdarg_builtin (gimple *call)
8a58ed0a 2590{
5f57a8b1 2591 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2592 bool va_list_simple_ptr;
389dd41b 2593 location_t loc = gimple_location (call);
8a58ed0a 2594
75a70cf9 2595 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2596 return NULL_TREE;
2597
75a70cf9 2598 callee = gimple_call_fndecl (call);
5f57a8b1 2599
2600 cfun_va_list = targetm.fn_abi_va_list (callee);
2601 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2602 && (TREE_TYPE (cfun_va_list) == void_type_node
2603 || TREE_TYPE (cfun_va_list) == char_type_node);
2604
8a58ed0a 2605 switch (DECL_FUNCTION_CODE (callee))
2606 {
2607 case BUILT_IN_VA_START:
2608 if (!va_list_simple_ptr
2609 || targetm.expand_builtin_va_start != NULL
e7ed5dd7 2610 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2611 return NULL_TREE;
2612
75a70cf9 2613 if (gimple_call_num_args (call) != 2)
8a58ed0a 2614 return NULL_TREE;
2615
75a70cf9 2616 lhs = gimple_call_arg (call, 0);
8a58ed0a 2617 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2618 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2619 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2620 return NULL_TREE;
48e1416a 2621
389dd41b 2622 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2623 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2624 1, integer_zero_node);
389dd41b 2625 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2626 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2627
2628 case BUILT_IN_VA_COPY:
2629 if (!va_list_simple_ptr)
2630 return NULL_TREE;
2631
75a70cf9 2632 if (gimple_call_num_args (call) != 2)
8a58ed0a 2633 return NULL_TREE;
2634
75a70cf9 2635 lhs = gimple_call_arg (call, 0);
8a58ed0a 2636 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2637 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2638 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2639 return NULL_TREE;
2640
389dd41b 2641 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2642 rhs = gimple_call_arg (call, 1);
8a58ed0a 2643 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2644 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2645 return NULL_TREE;
2646
389dd41b 2647 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2648 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2649
2650 case BUILT_IN_VA_END:
75a70cf9 2651 /* No effect, so the statement will be deleted. */
8a58ed0a 2652 return integer_zero_node;
2653
2654 default:
2655 gcc_unreachable ();
2656 }
2657}
75a70cf9 2658
f87df69a 2659/* Attempt to make the block of __builtin_unreachable I unreachable by changing
2660 the incoming jumps. Return true if at least one jump was changed. */
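
/* A hypothetical example: if <bb 5> starts with __builtin_unreachable ()
   and a predecessor block ends in

     if (x_1 > 0) goto <bb 5>; else goto <bb 4>;

   the condition is rewritten to if (0), so control can only reach
   <bb 4> and the edge into the unreachable block goes dead.  */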
2661
2662static bool
2663optimize_unreachable (gimple_stmt_iterator i)
2664{
2665 basic_block bb = gsi_bb (i);
2666 gimple_stmt_iterator gsi;
42acab1c 2667 gimple *stmt;
f87df69a 2668 edge_iterator ei;
2669 edge e;
2670 bool ret;
2671
f6b540af 2672 if (flag_sanitize & SANITIZE_UNREACHABLE)
2673 return false;
2674
f87df69a 2675 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2676 {
2677 stmt = gsi_stmt (gsi);
2678
2679 if (is_gimple_debug (stmt))
2680 continue;
2681
1a91d914 2682 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
f87df69a 2683 {
2684 /* Verify we do not need to preserve the label. */
1a91d914 2685 if (FORCED_LABEL (gimple_label_label (label_stmt)))
f87df69a 2686 return false;
2687
2688 continue;
2689 }
2690
2691 /* Only handle the case that __builtin_unreachable is the first statement
2692 in the block. We rely on DCE to remove stmts without side-effects
2693 before __builtin_unreachable. */
2694 if (gsi_stmt (gsi) != gsi_stmt (i))
2695 return false;
2696 }
2697
2698 ret = false;
2699 FOR_EACH_EDGE (e, ei, bb->preds)
2700 {
2701 gsi = gsi_last_bb (e->src);
522f73a1 2702 if (gsi_end_p (gsi))
2703 continue;
f87df69a 2704
522f73a1 2705 stmt = gsi_stmt (gsi);
1a91d914 2706 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
f87df69a 2707 {
2708 if (e->flags & EDGE_TRUE_VALUE)
1a91d914 2709 gimple_cond_make_false (cond_stmt);
f87df69a 2710 else if (e->flags & EDGE_FALSE_VALUE)
1a91d914 2711 gimple_cond_make_true (cond_stmt);
f87df69a 2712 else
2713 gcc_unreachable ();
1a91d914 2714 update_stmt (cond_stmt);
f87df69a 2715 }
2716 else
2717 {
2718 /* Todo: handle other cases, f.i. switch statement. */
2719 continue;
2720 }
2721
2722 ret = true;
2723 }
2724
2725 return ret;
2726}
2727
9c1a31e4 2728/* Optimize
2729 mask_2 = 1 << cnt_1;
2730 _4 = __atomic_fetch_or_* (ptr_6, mask_2, _3);
2731 _5 = _4 & mask_2;
2732 to
2733 _4 = ATOMIC_BIT_TEST_AND_SET (ptr_6, cnt_1, 0, _3);
2734 _5 = _4;
2735 If _5 is only used in _5 != 0 or _5 == 0 comparisons, 1
2736 is passed instead of 0, and the builtin just returns a zero
2737 or 1 value instead of the actual bit.
2738 Similarly for __sync_fetch_and_or_* (without the ", _3" part
2739 in there), and/or if mask_2 is a power of 2 constant.
2740 Similarly for xor instead of or, use ATOMIC_BIT_TEST_AND_COMPLEMENT
2741 in that case. And similarly for and instead of or, except that
2742 the second argument to the builtin needs to be one's complement
 2743   of the mask instead of the mask itself. */
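
/* A user-level illustration (hypothetical source, not from this file):

     bool f (unsigned *p)
     { return __atomic_fetch_or (p, 8, __ATOMIC_SEQ_CST) & 8; }

   matches the pattern above, so on targets providing the
   atomic_bit_test_and_set optab it can be implemented with a single
   atomic bit-test-and-set instruction instead of a full fetch-or.  */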
2744
2745static void
2746optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
2747 enum internal_fn fn, bool has_model_arg,
2748 bool after)
2749{
2750 gimple *call = gsi_stmt (*gsip);
2751 tree lhs = gimple_call_lhs (call);
2752 use_operand_p use_p;
2753 gimple *use_stmt;
2754 tree mask, bit;
2755 optab optab;
2756
2757 if (!flag_inline_atomics
2758 || optimize_debug
2759 || !gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2760 || !lhs
2761 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2762 || !single_imm_use (lhs, &use_p, &use_stmt)
2763 || !is_gimple_assign (use_stmt)
2764 || gimple_assign_rhs_code (use_stmt) != BIT_AND_EXPR
2765 || !gimple_vdef (call))
2766 return;
2767
2768 switch (fn)
2769 {
2770 case IFN_ATOMIC_BIT_TEST_AND_SET:
2771 optab = atomic_bit_test_and_set_optab;
2772 break;
2773 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
2774 optab = atomic_bit_test_and_complement_optab;
2775 break;
2776 case IFN_ATOMIC_BIT_TEST_AND_RESET:
2777 optab = atomic_bit_test_and_reset_optab;
2778 break;
2779 default:
2780 return;
2781 }
2782
2783 if (optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs))) == CODE_FOR_nothing)
2784 return;
2785
2786 mask = gimple_call_arg (call, 1);
2787 tree use_lhs = gimple_assign_lhs (use_stmt);
2788 if (!use_lhs)
2789 return;
2790
2791 if (TREE_CODE (mask) == INTEGER_CST)
2792 {
2793 if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
2794 mask = const_unop (BIT_NOT_EXPR, TREE_TYPE (mask), mask);
2795 mask = fold_convert (TREE_TYPE (lhs), mask);
2796 int ibit = tree_log2 (mask);
2797 if (ibit < 0)
2798 return;
2799 bit = build_int_cst (TREE_TYPE (lhs), ibit);
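      /* E.g. a constant mask of 8 selects bit 3; for the and case the
	 original constant was ~8 before the BIT_NOT above.  */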
2800 }
2801 else if (TREE_CODE (mask) == SSA_NAME)
2802 {
2803 gimple *g = SSA_NAME_DEF_STMT (mask);
2804 if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
2805 {
2806 if (!is_gimple_assign (g)
2807 || gimple_assign_rhs_code (g) != BIT_NOT_EXPR)
2808 return;
2809 mask = gimple_assign_rhs1 (g);
2810 if (TREE_CODE (mask) != SSA_NAME)
2811 return;
2812 g = SSA_NAME_DEF_STMT (mask);
2813 }
2814 if (!is_gimple_assign (g)
2815 || gimple_assign_rhs_code (g) != LSHIFT_EXPR
2816 || !integer_onep (gimple_assign_rhs1 (g)))
2817 return;
2818 bit = gimple_assign_rhs2 (g);
2819 }
2820 else
2821 return;
2822
2823 if (gimple_assign_rhs1 (use_stmt) == lhs)
2824 {
2825 if (!operand_equal_p (gimple_assign_rhs2 (use_stmt), mask, 0))
2826 return;
2827 }
2828 else if (gimple_assign_rhs2 (use_stmt) != lhs
2829 || !operand_equal_p (gimple_assign_rhs1 (use_stmt), mask, 0))
2830 return;
2831
2832 bool use_bool = true;
2833 bool has_debug_uses = false;
2834 imm_use_iterator iter;
2835 gimple *g;
2836
2837 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs))
2838 use_bool = false;
2839 FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
2840 {
2841 enum tree_code code = ERROR_MARK;
7b645785 2842 tree op0 = NULL_TREE, op1 = NULL_TREE;
9c1a31e4 2843 if (is_gimple_debug (g))
2844 {
2845 has_debug_uses = true;
2846 continue;
2847 }
2848 else if (is_gimple_assign (g))
2849 switch (gimple_assign_rhs_code (g))
2850 {
2851 case COND_EXPR:
2852 op1 = gimple_assign_rhs1 (g);
2853 code = TREE_CODE (op1);
2854 op0 = TREE_OPERAND (op1, 0);
2855 op1 = TREE_OPERAND (op1, 1);
2856 break;
2857 case EQ_EXPR:
2858 case NE_EXPR:
2859 code = gimple_assign_rhs_code (g);
2860 op0 = gimple_assign_rhs1 (g);
2861 op1 = gimple_assign_rhs2 (g);
2862 break;
2863 default:
2864 break;
2865 }
2866 else if (gimple_code (g) == GIMPLE_COND)
2867 {
2868 code = gimple_cond_code (g);
2869 op0 = gimple_cond_lhs (g);
2870 op1 = gimple_cond_rhs (g);
2871 }
2872
2873 if ((code == EQ_EXPR || code == NE_EXPR)
2874 && op0 == use_lhs
2875 && integer_zerop (op1))
2876 {
2877 use_operand_p use_p;
2878 int n = 0;
2879 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2880 n++;
2881 if (n == 1)
2882 continue;
2883 }
2884
2885 use_bool = false;
2886 BREAK_FROM_IMM_USE_STMT (iter);
2887 }
2888
2889 tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
2890 tree flag = build_int_cst (TREE_TYPE (lhs), use_bool);
2891 if (has_model_arg)
2892 g = gimple_build_call_internal (fn, 4, gimple_call_arg (call, 0),
2893 bit, flag, gimple_call_arg (call, 2));
2894 else
2895 g = gimple_build_call_internal (fn, 3, gimple_call_arg (call, 0),
2896 bit, flag);
2897 gimple_call_set_lhs (g, new_lhs);
2898 gimple_set_location (g, gimple_location (call));
2899 gimple_set_vuse (g, gimple_vuse (call));
2900 gimple_set_vdef (g, gimple_vdef (call));
c35e53b1 2901 bool throws = stmt_can_throw_internal (call);
2902 gimple_call_set_nothrow (as_a <gcall *> (g),
2903 gimple_call_nothrow_p (as_a <gcall *> (call)));
9c1a31e4 2904 SSA_NAME_DEF_STMT (gimple_vdef (call)) = g;
2905 gimple_stmt_iterator gsi = *gsip;
2906 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
c35e53b1 2907 edge e = NULL;
2908 if (throws)
2909 {
2910 maybe_clean_or_replace_eh_stmt (call, g);
2911 if (after || (use_bool && has_debug_uses))
2912 e = find_fallthru_edge (gsi_bb (gsi)->succs);
2913 }
9c1a31e4 2914 if (after)
2915 {
2916 /* The internal function returns the value of the specified bit
2917 before the atomic operation. If we are interested in the value
 2918	 of the specified bit after the atomic operation (this only makes
 2919	 sense for xor; otherwise the bit content is known at compile time),
2920 we need to invert the bit. */
2921 g = gimple_build_assign (make_ssa_name (TREE_TYPE (lhs)),
2922 BIT_XOR_EXPR, new_lhs,
2923 use_bool ? build_int_cst (TREE_TYPE (lhs), 1)
2924 : mask);
2925 new_lhs = gimple_assign_lhs (g);
c35e53b1 2926 if (throws)
2927 {
2928 gsi_insert_on_edge_immediate (e, g);
2929 gsi = gsi_for_stmt (g);
2930 }
2931 else
2932 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
9c1a31e4 2933 }
2934 if (use_bool && has_debug_uses)
2935 {
c35e53b1 2936 tree temp = NULL_TREE;
2937 if (!throws || after || single_pred_p (e->dest))
2938 {
2939 temp = make_node (DEBUG_EXPR_DECL);
2940 DECL_ARTIFICIAL (temp) = 1;
2941 TREE_TYPE (temp) = TREE_TYPE (lhs);
2942 SET_DECL_MODE (temp, TYPE_MODE (TREE_TYPE (lhs)));
2943 tree t = build2 (LSHIFT_EXPR, TREE_TYPE (lhs), new_lhs, bit);
2944 g = gimple_build_debug_bind (temp, t, g);
2945 if (throws && !after)
2946 {
2947 gsi = gsi_after_labels (e->dest);
2948 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2949 }
2950 else
2951 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2952 }
9c1a31e4 2953 FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
2954 if (is_gimple_debug (g))
2955 {
2956 use_operand_p use_p;
c35e53b1 2957 if (temp == NULL_TREE)
2958 gimple_debug_bind_reset_value (g);
2959 else
2960 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2961 SET_USE (use_p, temp);
9c1a31e4 2962 update_stmt (g);
2963 }
2964 }
2965 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_lhs)
2966 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs);
2967 replace_uses_by (use_lhs, new_lhs);
2968 gsi = gsi_for_stmt (use_stmt);
2969 gsi_remove (&gsi, true);
2970 release_defs (use_stmt);
2971 gsi_remove (gsip, true);
2972 release_ssa_name (lhs);
2973}
2974
82193434 2975/* Optimize
2976 a = {};
2977 b = a;
2978 into
2979 a = {};
2980 b = {};
2981 Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
2982 and/or memcpy (&b, &a, sizeof (a)); instead of b = a; */
2983
2984static void
2985optimize_memcpy (gimple_stmt_iterator *gsip, tree dest, tree src, tree len)
2986{
2987 gimple *stmt = gsi_stmt (*gsip);
2988 if (gimple_has_volatile_ops (stmt))
2989 return;
2990
2991 tree vuse = gimple_vuse (stmt);
2992 if (vuse == NULL)
2993 return;
2994
2995 gimple *defstmt = SSA_NAME_DEF_STMT (vuse);
2996 tree src2 = NULL_TREE, len2 = NULL_TREE;
2997 HOST_WIDE_INT offset, offset2;
2998 tree val = integer_zero_node;
2999 if (gimple_store_p (defstmt)
3000 && gimple_assign_single_p (defstmt)
3001 && TREE_CODE (gimple_assign_rhs1 (defstmt)) == CONSTRUCTOR
3002 && !gimple_clobber_p (defstmt))
3003 src2 = gimple_assign_lhs (defstmt);
3004 else if (gimple_call_builtin_p (defstmt, BUILT_IN_MEMSET)
3005 && TREE_CODE (gimple_call_arg (defstmt, 0)) == ADDR_EXPR
3006 && TREE_CODE (gimple_call_arg (defstmt, 1)) == INTEGER_CST)
3007 {
3008 src2 = TREE_OPERAND (gimple_call_arg (defstmt, 0), 0);
3009 len2 = gimple_call_arg (defstmt, 2);
3010 val = gimple_call_arg (defstmt, 1);
3011 /* For non-0 val, we'd have to transform stmt from assignment
3012 into memset (only if dest is addressable). */
3013 if (!integer_zerop (val) && is_gimple_assign (stmt))
3014 src2 = NULL_TREE;
3015 }
3016
3017 if (src2 == NULL_TREE)
3018 return;
3019
3020 if (len == NULL_TREE)
3021 len = (TREE_CODE (src) == COMPONENT_REF
3022 ? DECL_SIZE_UNIT (TREE_OPERAND (src, 1))
3023 : TYPE_SIZE_UNIT (TREE_TYPE (src)));
3024 if (len2 == NULL_TREE)
3025 len2 = (TREE_CODE (src2) == COMPONENT_REF
3026 ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
3027 : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
3028 if (len == NULL_TREE
3029 || TREE_CODE (len) != INTEGER_CST
3030 || len2 == NULL_TREE
3031 || TREE_CODE (len2) != INTEGER_CST)
3032 return;
3033
3034 src = get_addr_base_and_unit_offset (src, &offset);
3035 src2 = get_addr_base_and_unit_offset (src2, &offset2);
3036 if (src == NULL_TREE
3037 || src2 == NULL_TREE
3038 || offset < offset2)
3039 return;
3040
3041 if (!operand_equal_p (src, src2, 0))
3042 return;
3043
3044 /* [ src + offset2, src + offset2 + len2 - 1 ] is set to val.
3045 Make sure that
3046 [ src + offset, src + offset + len - 1 ] is a subset of that. */
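  /* A worked example (illustrative): if the earlier store covered bytes
     [0, 16) of src (offset2 = 0, len2 = 16) and this statement reads
     bytes [4, 12) (offset = 4, len = 8), then 8 + (4 - 0) = 12 <= 16
     and the transformation is safe.  */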
3047 if (wi::to_offset (len) + (offset - offset2) > wi::to_offset (len2))
3048 return;
3049
3050 if (dump_file && (dump_flags & TDF_DETAILS))
3051 {
3052 fprintf (dump_file, "Simplified\n ");
3053 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3054 fprintf (dump_file, "after previous\n ");
3055 print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
3056 }
3057
3058 /* For simplicity, don't change the kind of the stmt,
3059 turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
3060 into memset (&dest, val, len);
3061 In theory we could change dest = src into memset if dest
3062 is addressable (maybe beneficial if val is not 0), or
3063 memcpy (&dest, &src, len) into dest = {} if len is the size
 3064	 of dest and dest isn't volatile. */
3065 if (is_gimple_assign (stmt))
3066 {
3067 tree ctor = build_constructor (TREE_TYPE (dest), NULL);
3068 gimple_assign_set_rhs_from_tree (gsip, ctor);
3069 update_stmt (stmt);
3070 }
3071 else /* If stmt is memcpy, transform it into memset. */
3072 {
3073 gcall *call = as_a <gcall *> (stmt);
3074 tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
3075 gimple_call_set_fndecl (call, fndecl);
3076 gimple_call_set_fntype (call, TREE_TYPE (fndecl));
3077 gimple_call_set_arg (call, 1, val);
3078 update_stmt (stmt);
3079 }
3080
3081 if (dump_file && (dump_flags & TDF_DETAILS))
3082 {
3083 fprintf (dump_file, "into\n ");
3084 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3085 }
3086}
3087
4ee9c684 3088/* A simple pass that attempts to fold all builtin functions. This pass
3089 is run after we've propagated as many constants as we can. */
3090
65b0537f 3091namespace {
3092
3093const pass_data pass_data_fold_builtins =
3094{
3095 GIMPLE_PASS, /* type */
3096 "fab", /* name */
3097 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 3098 TV_NONE, /* tv_id */
3099 ( PROP_cfg | PROP_ssa ), /* properties_required */
3100 0, /* properties_provided */
3101 0, /* properties_destroyed */
3102 0, /* todo_flags_start */
8b88439e 3103 TODO_update_ssa, /* todo_flags_finish */
65b0537f 3104};
3105
3106class pass_fold_builtins : public gimple_opt_pass
3107{
3108public:
3109 pass_fold_builtins (gcc::context *ctxt)
3110 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
3111 {}
3112
3113 /* opt_pass methods: */
3114 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
3115 virtual unsigned int execute (function *);
3116
3117}; // class pass_fold_builtins
3118
3119unsigned int
3120pass_fold_builtins::execute (function *fun)
4ee9c684 3121{
b36237eb 3122 bool cfg_changed = false;
4ee9c684 3123 basic_block bb;
b1b7c0c4 3124 unsigned int todoflags = 0;
48e1416a 3125
65b0537f 3126 FOR_EACH_BB_FN (bb, fun)
4ee9c684 3127 {
75a70cf9 3128 gimple_stmt_iterator i;
3129 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 3130 {
42acab1c 3131 gimple *stmt, *old_stmt;
40a3eb84 3132 tree callee;
0a39fd54 3133 enum built_in_function fcode;
4ee9c684 3134
75a70cf9 3135 stmt = gsi_stmt (i);
3136
3137 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3138 {
896a0c42 3139	    /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
3140 after the last GIMPLE DSE they aren't needed and might
3141 unnecessarily keep the SSA_NAMEs live. */
3142 if (gimple_clobber_p (stmt))
3143 {
3144 tree lhs = gimple_assign_lhs (stmt);
3145 if (TREE_CODE (lhs) == MEM_REF
3146 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
3147 {
3148 unlink_stmt_vdef (stmt);
3149 gsi_remove (&i, true);
3150 release_defs (stmt);
3151 continue;
3152 }
3153 }
82193434 3154 else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
3155 optimize_memcpy (&i, gimple_assign_lhs (stmt),
3156 gimple_assign_rhs1 (stmt), NULL_TREE);
75a70cf9 3157 gsi_next (&i);
0a39fd54 3158 continue;
3159 }
40a3eb84 3160
75a70cf9 3161 callee = gimple_call_fndecl (stmt);
4ee9c684 3162 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 3163 {
75a70cf9 3164 gsi_next (&i);
0a39fd54 3165 continue;
3166 }
5a4b7e1e 3167
40a3eb84 3168 fcode = DECL_FUNCTION_CODE (callee);
3169 if (fold_stmt (&i))
3170 ;
3171 else
3172 {
3173 tree result = NULL_TREE;
3174 switch (DECL_FUNCTION_CODE (callee))
3175 {
3176 case BUILT_IN_CONSTANT_P:
3177 /* Resolve __builtin_constant_p. If it hasn't been
3178 folded to integer_one_node by now, it's fairly
3179 certain that the value simply isn't constant. */
3180 result = integer_zero_node;
3181 break;
5a4b7e1e 3182
40a3eb84 3183 case BUILT_IN_ASSUME_ALIGNED:
3184 /* Remove __builtin_assume_aligned. */
3185 result = gimple_call_arg (stmt, 0);
3186 break;
4ee9c684 3187
40a3eb84 3188 case BUILT_IN_STACK_RESTORE:
3189 result = optimize_stack_restore (i);
3190 if (result)
3191 break;
3192 gsi_next (&i);
3193 continue;
fca0886c 3194
40a3eb84 3195 case BUILT_IN_UNREACHABLE:
3196 if (optimize_unreachable (i))
3197 cfg_changed = true;
8a58ed0a 3198 break;
8a58ed0a 3199
9c1a31e4 3200 case BUILT_IN_ATOMIC_FETCH_OR_1:
3201 case BUILT_IN_ATOMIC_FETCH_OR_2:
3202 case BUILT_IN_ATOMIC_FETCH_OR_4:
3203 case BUILT_IN_ATOMIC_FETCH_OR_8:
3204 case BUILT_IN_ATOMIC_FETCH_OR_16:
3205 optimize_atomic_bit_test_and (&i,
3206 IFN_ATOMIC_BIT_TEST_AND_SET,
3207 true, false);
3208 break;
3209 case BUILT_IN_SYNC_FETCH_AND_OR_1:
3210 case BUILT_IN_SYNC_FETCH_AND_OR_2:
3211 case BUILT_IN_SYNC_FETCH_AND_OR_4:
3212 case BUILT_IN_SYNC_FETCH_AND_OR_8:
3213 case BUILT_IN_SYNC_FETCH_AND_OR_16:
3214 optimize_atomic_bit_test_and (&i,
3215 IFN_ATOMIC_BIT_TEST_AND_SET,
3216 false, false);
3217 break;
3218
3219 case BUILT_IN_ATOMIC_FETCH_XOR_1:
3220 case BUILT_IN_ATOMIC_FETCH_XOR_2:
3221 case BUILT_IN_ATOMIC_FETCH_XOR_4:
3222 case BUILT_IN_ATOMIC_FETCH_XOR_8:
3223 case BUILT_IN_ATOMIC_FETCH_XOR_16:
3224 optimize_atomic_bit_test_and
3225 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, false);
3226 break;
3227 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
3228 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
3229 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
3230 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
3231 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
3232 optimize_atomic_bit_test_and
3233 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, false);
3234 break;
3235
3236 case BUILT_IN_ATOMIC_XOR_FETCH_1:
3237 case BUILT_IN_ATOMIC_XOR_FETCH_2:
3238 case BUILT_IN_ATOMIC_XOR_FETCH_4:
3239 case BUILT_IN_ATOMIC_XOR_FETCH_8:
3240 case BUILT_IN_ATOMIC_XOR_FETCH_16:
3241 optimize_atomic_bit_test_and
3242 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, true);
3243 break;
3244 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
3245 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
3246 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
3247 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
3248 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
3249 optimize_atomic_bit_test_and
3250 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, true);
3251 break;
3252
3253 case BUILT_IN_ATOMIC_FETCH_AND_1:
3254 case BUILT_IN_ATOMIC_FETCH_AND_2:
3255 case BUILT_IN_ATOMIC_FETCH_AND_4:
3256 case BUILT_IN_ATOMIC_FETCH_AND_8:
3257 case BUILT_IN_ATOMIC_FETCH_AND_16:
3258 optimize_atomic_bit_test_and (&i,
3259 IFN_ATOMIC_BIT_TEST_AND_RESET,
3260 true, false);
3261 break;
3262 case BUILT_IN_SYNC_FETCH_AND_AND_1:
3263 case BUILT_IN_SYNC_FETCH_AND_AND_2:
3264 case BUILT_IN_SYNC_FETCH_AND_AND_4:
3265 case BUILT_IN_SYNC_FETCH_AND_AND_8:
3266 case BUILT_IN_SYNC_FETCH_AND_AND_16:
3267 optimize_atomic_bit_test_and (&i,
3268 IFN_ATOMIC_BIT_TEST_AND_RESET,
3269 false, false);
3270 break;
3271
82193434 3272 case BUILT_IN_MEMCPY:
3273 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3274 && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
3275 && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
3276 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST)
3277 {
3278 tree dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
3279 tree src = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3280 tree len = gimple_call_arg (stmt, 2);
3281 optimize_memcpy (&i, dest, src, len);
3282 }
3283 break;
3284
40a3eb84 3285 case BUILT_IN_VA_START:
3286 case BUILT_IN_VA_END:
3287 case BUILT_IN_VA_COPY:
3288 /* These shouldn't be folded before pass_stdarg. */
3289 result = optimize_stdarg_builtin (stmt);
82193434 3290 break;
f87df69a 3291
40a3eb84 3292 default:;
3293 }
bdd0e199 3294
40a3eb84 3295 if (!result)
3296 {
3297 gsi_next (&i);
3298 continue;
3299 }
4ee9c684 3300
40a3eb84 3301 if (!update_call_from_tree (&i, result))
3302 gimplify_and_update_call_from_tree (&i, result);
3303 }
3304
3305 todoflags |= TODO_update_address_taken;
f87df69a 3306
4ee9c684 3307 if (dump_file && (dump_flags & TDF_DETAILS))
3308 {
3309 fprintf (dump_file, "Simplified\n ");
75a70cf9 3310 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3311 }
3312
75a70cf9 3313 old_stmt = stmt;
75a70cf9 3314 stmt = gsi_stmt (i);
4c5fd53c 3315 update_stmt (stmt);
de6ed584 3316
75a70cf9 3317 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3318 && gimple_purge_dead_eh_edges (bb))
b36237eb 3319 cfg_changed = true;
4ee9c684 3320
3321 if (dump_file && (dump_flags & TDF_DETAILS))
3322 {
3323 fprintf (dump_file, "to\n ");
75a70cf9 3324 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3325 fprintf (dump_file, "\n");
3326 }
0a39fd54 3327
3328 /* Retry the same statement if it changed into another
 3329	     builtin; there might be new opportunities now. */
75a70cf9 3330 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3331 {
75a70cf9 3332 gsi_next (&i);
0a39fd54 3333 continue;
3334 }
75a70cf9 3335 callee = gimple_call_fndecl (stmt);
0a39fd54 3336 if (!callee
75a70cf9 3337 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 3338 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 3339 gsi_next (&i);
4ee9c684 3340 }
3341 }
48e1416a 3342
b36237eb 3343 /* Delete unreachable blocks. */
b1b7c0c4 3344 if (cfg_changed)
3345 todoflags |= TODO_cleanup_cfg;
48e1416a 3346
b1b7c0c4 3347 return todoflags;
4ee9c684 3348}
3349
cbe8bda8 3350} // anon namespace
3351
3352gimple_opt_pass *
3353make_pass_fold_builtins (gcc::context *ctxt)
3354{
3355 return new pass_fold_builtins (ctxt);
3356}
184fac50 3357
3358/* A simple pass that emits some warnings post IPA. */
3359
3360namespace {
3361
3362const pass_data pass_data_post_ipa_warn =
3363{
3364 GIMPLE_PASS, /* type */
3365 "post_ipa_warn", /* name */
3366 OPTGROUP_NONE, /* optinfo_flags */
3367 TV_NONE, /* tv_id */
3368 ( PROP_cfg | PROP_ssa ), /* properties_required */
3369 0, /* properties_provided */
3370 0, /* properties_destroyed */
3371 0, /* todo_flags_start */
3372 0, /* todo_flags_finish */
3373};
3374
3375class pass_post_ipa_warn : public gimple_opt_pass
3376{
3377public:
3378 pass_post_ipa_warn (gcc::context *ctxt)
3379 : gimple_opt_pass (pass_data_post_ipa_warn, ctxt)
3380 {}
3381
3382 /* opt_pass methods: */
3383 opt_pass * clone () { return new pass_post_ipa_warn (m_ctxt); }
3384 virtual bool gate (function *) { return warn_nonnull != 0; }
3385 virtual unsigned int execute (function *);
3386
3387}; // class pass_fold_builtins
3388
3389unsigned int
3390pass_post_ipa_warn::execute (function *fun)
3391{
3392 basic_block bb;
3393
3394 FOR_EACH_BB_FN (bb, fun)
3395 {
3396 gimple_stmt_iterator gsi;
3397 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3398 {
3399 gimple *stmt = gsi_stmt (gsi);
3400 if (!is_gimple_call (stmt) || gimple_no_warning_p (stmt))
3401 continue;
3402
3403 if (warn_nonnull)
3404 {
3405 bitmap nonnullargs
3406 = get_nonnull_args (gimple_call_fntype (stmt));
3407 if (nonnullargs)
3408 {
3409 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
3410 {
3411 tree arg = gimple_call_arg (stmt, i);
3412 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
3413 continue;
3414 if (!integer_zerop (arg))
3415 continue;
3416 if (!bitmap_empty_p (nonnullargs)
3417 && !bitmap_bit_p (nonnullargs, i))
3418 continue;
3419
3420 location_t loc = gimple_location (stmt);
3421 if (warning_at (loc, OPT_Wnonnull,
3422 "argument %u null where non-null "
3423 "expected", i + 1))
3424 {
3425 tree fndecl = gimple_call_fndecl (stmt);
3426 if (fndecl && DECL_IS_BUILTIN (fndecl))
3427 inform (loc, "in a call to built-in function %qD",
3428 fndecl);
3429 else if (fndecl)
3430 inform (DECL_SOURCE_LOCATION (fndecl),
3431 "in a call to function %qD declared here",
3432 fndecl);
3433
3434 }
3435 }
3436 BITMAP_FREE (nonnullargs);
3437 }
3438 }
3439 }
3440 }
3441 return 0;
3442}
3443
3444} // anon namespace
3445
3446gimple_opt_pass *
3447make_pass_post_ipa_warn (gcc::context *ctxt)
3448{
3449 return new pass_post_ipa_warn (ctxt);
3450}