/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "params.h"
#include "wide-int-print.h"
#include "builtins.h"
#include "tree-chkp.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};

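/* For illustration of the encoding above (an example, not part of the
   pass itself): an 8-bit quantity known to be 0b1100xxxx, i.e. with
   the four top bits constant and the four low bits unknown, is
   represented as CONSTANT with value == 0b11000000 and
   mask == 0b00001111.  Zero bits in MASK are trusted from VALUE; one
   bits are unknown and the corresponding VALUE bits carry no
   information.  A mask of 0 is a fully known constant and an all-ones
   mask conveys no bit information at all.  */
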
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
					     val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}
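
/* As an illustration, for the partially known 8-bit value in the
   example above the CONSTANT branch prints the known bits and the
   mask in hex, producing dump output of the form
   "CONSTANT 0xc0 (0xf)".  */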

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
	  | widest_int::from (nonzero_bits, UNSIGNED));
}
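
/* A sketch of what extend_mask produces: for an 8-bit NONZERO_BITS of
   0x0f the result has its low eight bits equal to 0x0f and every bit
   above that precision set.  Bits that may be nonzero remain unknown
   in the CCP mask sense, while the known-zero bits of the variable
   become known-zero (constant) bits of the lattice value.  */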

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      wide_int nonzero_bits = get_nonzero_bits (var);
	      if (nonzero_bits != -1)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  val.mask = extend_mask (nonzero_bits);
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}

/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
			      TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
	return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
	if (!REAL_VALUE_ISNAN
	       (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
	    && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
				 VECTOR_CST_ELT (new_val.value, i), 0))
	  return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
	  && !operand_equal_p (TREE_REALPART (old_val.value),
			       TREE_REALPART (new_val.value), 0))
	return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
	  && !operand_equal_p (TREE_IMAGPART (old_val.value),
			       TREE_IMAGPART (new_val.value), 0))
	return false;
      return true;
    }
  return false;
}
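
/* For example, UNINITIALIZED -> UNDEFINED -> CONSTANT 3 -> VARYING is
   a monotonic chain of transitions for one SSA name over the course
   of the propagation, whereas CONSTANT 3 -> CONSTANT 4 or
   VARYING -> CONSTANT 3 would be rejected.  Refinements such as two
   INTEGER_CSTs that agree on all bits still marked constant in the
   new mask are accepted by the checks above.  */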

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t *new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.  Instead of dropping to VARYING
     use the meet operator to retain a conservative value.
     Missed optimizations like PR65851 make this necessary.
     It also ensures we converge to a stable lattice solution.  */
  if (new_val->lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val->value) != SSA_NAME)
    ccp_lattice_meet (new_val, old_val);

  gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val->lattice_val
      || (new_val->lattice_val == CONSTANT
	  && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
	      || (TREE_CODE (new_val->value) == INTEGER_CST
		  && (new_val->mask != old_val->mask
		      || (wi::bit_and_not (wi::to_widest (old_val->value),
					   new_val->mask)
			  != wi::bit_and_not (wi::to_widest (new_val->value),
					      new_val->mask))))
	      || (TREE_CODE (new_val->value) != INTEGER_CST
		  && !operand_equal_p (new_val->value, old_val->value, 0)))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constant values here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = *new_val;

      gcc_assert (new_val->lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
			       tree, const widest_int &, const widest_int &,
			       tree, const widest_int &, const widest_int &);

/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
	      : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
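
/* To illustrate with made-up numbers: for EXPR == &a where 'a' is
   known to be 16-byte aligned at byte offset 4,
   get_pointer_alignment_1 yields align == 128 bits and
   bitpos == 32 bits, so the result is CONSTANT with value == 4 and a
   mask whose low four bits are clear: the bits below the alignment
   are known, everything above them is not.  */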

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
      /* Fall back to a copy value.  */
      if (!for_bits_p
	  && val.lattice_val == VARYING
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
	{
	  val.lattice_val = CONSTANT;
	  val.value = expr;
	  val.mask = -1;
	}
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  bool has_nsa_operand;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  has_nsa_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      ccp_prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;

      if (SSA_NAME_IS_DEFAULT_DEF (use)
	  || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
	has_nsa_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      /* These 3 builtins use the first argument just as a magic
	 way to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  Also we can combine the stmt with definitions from
     operands whose definitions are not simulated again.  */
  if (has_constant_operand
      || has_nsa_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to a function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)
	      && !lookup_attribute ("assume_aligned",
				    TYPE_ATTRIBUTES (fntype))
	      && !lookup_attribute ("alloc_align",
				    TYPE_ATTRIBUTES (fntype))))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must simulate it
	     at least once; failing to do so means its outgoing edges
	     will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gphi *phi = i.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = -1;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      ccp_prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || first_pass_instance)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
	  wide_int nonzero_bits = wide_int::from (val->mask, precision,
						  UNSIGNED) | val->value;
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always return the SSA name
	 because its definition may not dominate the PHI node.
	 Doing optimistic copy propagation also causes a lot of
	 gcc.dg/uninit-pred*.c FAILs.  */
      && (val2->lattice_val != CONSTANT
	  || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
	   /* See above.  */
	   && (val1->lattice_val != CONSTANT
	       || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = (val1->mask | val2->mask
		    | (wi::to_widest (val1->value)
		       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && operand_equal_p (val1->value, val2->value, 0))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When unequal addresses are involved try meeting for
	 alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
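
/* A worked instance of the INTEGER_CST case above: meeting the 4-bit
   constants 0b0101 and 0b0111 (both with mask 0) gives
   mask = 0 | 0 | (0b0101 ^ 0b0111) = 0b0010, so the result stays
   CONSTANT with only bit 1 demoted to unknown.  Had the two values
   differed in every bit, the sign-extended mask would have been -1
   and the result would have dropped to VARYING.  */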


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gphi *phi)
{
  unsigned i;
  ccp_prop_value_t new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = 0;

  bool first = true;
  bool non_exec_edge = false;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);

	  if (first)
	    {
	      new_val = arg_val;
	      first = false;
	    }
	  else
	    ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
      else
	non_exec_edge = true;
    }

  /* In case there were non-executable edges and the value is a copy
     make sure its definition dominates the PHI node.  */
  if (non_exec_edge
      && new_val.lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == SSA_NAME
      && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
      && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
			   gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
    {
      new_val.lattice_val = VARYING;
      new_val.value = NULL_TREE;
      new_val.mask = -1;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), &new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* Return the constant value for OP, but signal to not follow SSA
   edges if the definition may be simulated again.  */

static tree
valueize_op_1 (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      /* If the definition may be simulated again we cannot follow
	 this SSA edge as the SSA propagator does not necessarily
	 re-visit the use.  */
      gimple def_stmt = SSA_NAME_DEF_STMT (op);
      if (!gimple_nop_p (def_stmt)
	  && prop_simulate_again_p (def_stmt))
	return NULL_TREE;
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt,
					     valueize_op, valueize_op_1);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  widest_int *val, widest_int *mask,
		  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	widest_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm, type, 1, 0);
	break;
      }

    CASE_CONVERT:
      {
	signop sgn;

	/* First extend mask and value according to the original type.  */
	sgn = TYPE_SIGN (rtype);
	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

	/* Then extend mask and value according to the target type.  */
	sgn = TYPE_SIGN (type);
	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
	break;
      }

    default:
      *mask = -1;
      break;
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   widest_int *val, widest_int *mask,
		   tree r1type, const widest_int &r1val,
		   const widest_int &r1mask, tree r2type,
		   const widest_int &r2val, const widest_int &r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);
  bool swap_p = false;

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RROTATE_EXPR)
		    code = LROTATE_EXPR;
		  else
		    code = RROTATE_EXPR;
		}
	      if (code == RROTATE_EXPR)
		{
		  *mask = wi::rrotate (r1mask, shift, width);
		  *val = wi::rrotate (r1val, shift, width);
		}
	      else
		{
		  *mask = wi::lrotate (r1mask, shift, width);
		  *val = wi::lrotate (r1val, shift, width);
		}
	    }
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RSHIFT_EXPR)
		    code = LSHIFT_EXPR;
		  else
		    code = RSHIFT_EXPR;
		}
	      if (code == RSHIFT_EXPR)
		{
		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
		}
	      else
		{
		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
		}
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = wi::ext (lo, width, sgn);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
	hi = wi::ext (hi, width, sgn);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

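    /* For illustration with hypothetical 4-bit operands:
       r1 = {value 0b1000, mask 0b0011} (one of 8..11) plus the
       constant 4 gives lo = 0b1000 + 0b0100 = 0b1100 and
       hi = 0b1011 + 0b0100 = 0b1111, so lo ^ hi = 0b0011 and the
       result is {value 0b1100, mask 0b0011}: the two top bits are
       known even though the two low bits are not.  */
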
    case MINUS_EXPR:
      {
	widest_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = wi::ctz (r1val | r1mask);
	int r2tz = wi::ctz (r2val | r2mask);
	if (r1tz + r2tz >= width)
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
			     width, sgn);
	    *val = 0;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	widest_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = 0;
	    *val = ((code == EQ_EXPR) ? 0 : 1);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      swap_p = true;
      code = swap_tree_comparison (code);
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;

	const widest_int &o1val = swap_p ? r2val : r1val;
	const widest_int &o1mask = swap_p ? r2mask : r1mask;
	const widest_int &o2val = swap_p ? r1val : r2val;
	const widest_int &o2mask = swap_p ? r1mask : r2mask;

	/* If the most significant bits are not known we know nothing.  */
	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	sgn = TYPE_SIGN (r1type);

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
	if (maxmin < 0)  /* o1 is less than o2.  */
	  {
	    *mask = 0;
	    *val = 1;
	  }
	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = 0;
	    *val = (code == LE_EXPR ? 1 : 0);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || wi::sext (r1val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || wi::sext (r2val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}

1594
237e78b1 1595/* Return the propagation value for __builtin_assume_aligned
1596 and functions with assume_aligned or alloc_aligned attribute.
1597 For __builtin_assume_aligned, ATTR is NULL_TREE,
1598 for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1599 is false, for alloc_aligned attribute ATTR is non-NULL and
1600 ALLOC_ALIGNED is true. */
fca0886c 1601
9908fe4d 1602static ccp_prop_value_t
1603bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
237e78b1 1604 bool alloc_aligned)
fca0886c 1605{
237e78b1 1606 tree align, misalign = NULL_TREE, type;
fca0886c 1607 unsigned HOST_WIDE_INT aligni, misaligni = 0;
9908fe4d 1608 ccp_prop_value_t alignval;
5de9d3ed 1609 widest_int value, mask;
9908fe4d 1610 ccp_prop_value_t val;
e913b5cd 1611
237e78b1 1612 if (attr == NULL_TREE)
1613 {
1614 tree ptr = gimple_call_arg (stmt, 0);
1615 type = TREE_TYPE (ptr);
1616 ptrval = get_value_for_expr (ptr, true);
1617 }
1618 else
1619 {
1620 tree lhs = gimple_call_lhs (stmt);
1621 type = TREE_TYPE (lhs);
1622 }
1623
fca0886c 1624 if (ptrval.lattice_val == UNDEFINED)
1625 return ptrval;
1626 gcc_assert ((ptrval.lattice_val == CONSTANT
1627 && TREE_CODE (ptrval.value) == INTEGER_CST)
c90d2c17 1628 || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
237e78b1 1629 if (attr == NULL_TREE)
fca0886c 1630 {
237e78b1 1631 /* Get aligni and misaligni from __builtin_assume_aligned. */
1632 align = gimple_call_arg (stmt, 1);
1633 if (!tree_fits_uhwi_p (align))
fca0886c 1634 return ptrval;
237e78b1 1635 aligni = tree_to_uhwi (align);
1636 if (gimple_call_num_args (stmt) > 2)
1637 {
1638 misalign = gimple_call_arg (stmt, 2);
1639 if (!tree_fits_uhwi_p (misalign))
1640 return ptrval;
1641 misaligni = tree_to_uhwi (misalign);
1642 }
1643 }
1644 else
fca0886c 1645 {
237e78b1 1646 /* Get aligni and misaligni from assume_aligned or
1647 alloc_align attributes. */
1648 if (TREE_VALUE (attr) == NULL_TREE)
fca0886c 1649 return ptrval;
237e78b1 1650 attr = TREE_VALUE (attr);
1651 align = TREE_VALUE (attr);
1652 if (!tree_fits_uhwi_p (align))
fca0886c 1653 return ptrval;
237e78b1 1654 aligni = tree_to_uhwi (align);
1655 if (alloc_aligned)
1656 {
1657 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1658 return ptrval;
1659 align = gimple_call_arg (stmt, aligni - 1);
1660 if (!tree_fits_uhwi_p (align))
1661 return ptrval;
1662 aligni = tree_to_uhwi (align);
1663 }
1664 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1665 {
1666 misalign = TREE_VALUE (TREE_CHAIN (attr));
1667 if (!tree_fits_uhwi_p (misalign))
1668 return ptrval;
1669 misaligni = tree_to_uhwi (misalign);
1670 }
fca0886c 1671 }
237e78b1 1672 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1673 return ptrval;
1674
fca0886c 1675 align = build_int_cst_type (type, -aligni);
1676 alignval = get_value_for_expr (align, true);
1677 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
e913b5cd 1678 type, value_to_wide_int (ptrval), ptrval.mask,
1679 type, value_to_wide_int (alignval), alignval.mask);
c90d2c17 1680 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
fca0886c 1681 {
1682 val.lattice_val = CONSTANT;
1683 val.mask = mask;
e913b5cd 1684 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1685 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1686 value |= misaligni;
fca0886c 1687 /* ??? Delay building trees here. */
e913b5cd 1688 val.value = wide_int_to_tree (type, value);
fca0886c 1689 }
1690 else
1691 {
1692 val.lattice_val = VARYING;
1693 val.value = NULL_TREE;
e913b5cd 1694 val.mask = -1;
fca0886c 1695 }
1696 return val;
1697}
1698
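/* A standalone sketch (hypothetical example, not part of this file) of
   what the lattice computed above means at the source level.  With
   ALIGN 16 and MISALIGN 4, CCP derives <value 4, mask ~15> for Q: the
   low four bits are known exactly, so the test below can fold to 1.  */

#include <stdint.h>

int
misalign_known (void *p)
{
  int32_t *q = __builtin_assume_aligned (p, 16, 4);
  return ((uintptr_t) q & 15) == 4;	/* known true under the hint */
}
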
75a70cf9 1699/* Evaluate statement STMT.
1700 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1701
9908fe4d 1702static ccp_prop_value_t
75a70cf9 1703evaluate_stmt (gimple stmt)
4ee9c684 1704{
9908fe4d 1705 ccp_prop_value_t val;
4f61cce6 1706 tree simplified = NULL_TREE;
88dbf20f 1707 ccp_lattice_t likelyvalue = likely_value (stmt);
b7e55469 1708 bool is_constant = false;
581bf1c2 1709 unsigned int align;
88dbf20f 1710
b7e55469 1711 if (dump_file && (dump_flags & TDF_DETAILS))
1712 {
1713 fprintf (dump_file, "which is likely ");
1714 switch (likelyvalue)
1715 {
1716 case CONSTANT:
1717 fprintf (dump_file, "CONSTANT");
1718 break;
1719 case UNDEFINED:
1720 fprintf (dump_file, "UNDEFINED");
1721 break;
1722 case VARYING:
1723 fprintf (dump_file, "VARYING");
1724 break;
1725 default:;
1726 }
1727 fprintf (dump_file, "\n");
1728 }
add6ee5e 1729
4ee9c684 1730 /* If the statement is likely to have a CONSTANT result, then try
1731 to fold the statement to determine the constant value. */
75a70cf9 1732 /* FIXME. This is the only place that we call ccp_fold.
1733 Since likely_value never returns CONSTANT for calls, we will
 1734 not attempt to fold them, including builtins that might profit from folding. */
4ee9c684 1735 if (likelyvalue == CONSTANT)
b7e55469 1736 {
1737 fold_defer_overflow_warnings ();
1738 simplified = ccp_fold (stmt);
27de3d43 1739 if (simplified && TREE_CODE (simplified) == SSA_NAME)
1740 {
1741 val = *get_value (simplified);
1742 if (val.lattice_val != VARYING)
1743 {
1744 fold_undefer_overflow_warnings (true, stmt, 0);
1745 return val;
1746 }
1747 }
b7e55469 1748 is_constant = simplified && is_gimple_min_invariant (simplified);
1749 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1750 if (is_constant)
1751 {
1752 /* The statement produced a constant value. */
1753 val.lattice_val = CONSTANT;
1754 val.value = simplified;
e913b5cd 1755 val.mask = 0;
27de3d43 1756 return val;
b7e55469 1757 }
1758 }
4ee9c684 1759 /* If the statement is likely to have a VARYING result, then do not
1760 bother folding the statement. */
04236c3a 1761 else if (likelyvalue == VARYING)
75a70cf9 1762 {
590c3166 1763 enum gimple_code code = gimple_code (stmt);
75a70cf9 1764 if (code == GIMPLE_ASSIGN)
1765 {
1766 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 1767
75a70cf9 1768 /* Other cases cannot satisfy is_gimple_min_invariant
1769 without folding. */
1770 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1771 simplified = gimple_assign_rhs1 (stmt);
1772 }
1773 else if (code == GIMPLE_SWITCH)
1a91d914 1774 simplified = gimple_switch_index (as_a <gswitch *> (stmt));
75a70cf9 1775 else
a65c4d64 1776 /* These cannot satisfy is_gimple_min_invariant without folding. */
1777 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
b7e55469 1778 is_constant = simplified && is_gimple_min_invariant (simplified);
1779 if (is_constant)
1780 {
1781 /* The statement produced a constant value. */
1782 val.lattice_val = CONSTANT;
1783 val.value = simplified;
e913b5cd 1784 val.mask = 0;
b7e55469 1785 }
75a70cf9 1786 }
4c9206f2 1787 /* If the statement result is likely UNDEFINED, make it so. */
1788 else if (likelyvalue == UNDEFINED)
1789 {
1790 val.lattice_val = UNDEFINED;
1791 val.value = NULL_TREE;
1792 val.mask = 0;
1793 return val;
1794 }
4ee9c684 1795
b7e55469 1796 /* Resort to simplification for bitwise tracking. */
1797 if (flag_tree_bit_ccp
db48deb0 1798 && (likelyvalue == CONSTANT || is_gimple_call (stmt)
1799 || (gimple_assign_single_p (stmt)
1800 && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
b7e55469 1801 && !is_constant)
912f109f 1802 {
b7e55469 1803 enum gimple_code code = gimple_code (stmt);
1804 val.lattice_val = VARYING;
1805 val.value = NULL_TREE;
e913b5cd 1806 val.mask = -1;
b7e55469 1807 if (code == GIMPLE_ASSIGN)
912f109f 1808 {
b7e55469 1809 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1810 tree rhs1 = gimple_assign_rhs1 (stmt);
c0a4b664 1811 tree lhs = gimple_assign_lhs (stmt);
1812 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1813 || POINTER_TYPE_P (TREE_TYPE (lhs)))
1814 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1815 || POINTER_TYPE_P (TREE_TYPE (rhs1))))
1816 switch (get_gimple_rhs_class (subcode))
1817 {
1818 case GIMPLE_SINGLE_RHS:
1819 val = get_value_for_expr (rhs1, true);
1820 break;
1821
1822 case GIMPLE_UNARY_RHS:
1823 val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
1824 break;
1825
1826 case GIMPLE_BINARY_RHS:
1827 val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
1828 gimple_assign_rhs2 (stmt));
1829 break;
1830
1831 default:;
1832 }
912f109f 1833 }
b7e55469 1834 else if (code == GIMPLE_COND)
1835 {
1836 enum tree_code code = gimple_cond_code (stmt);
1837 tree rhs1 = gimple_cond_lhs (stmt);
1838 tree rhs2 = gimple_cond_rhs (stmt);
1839 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1840 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1841 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1842 }
0b4f0116 1843 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
153c3b50 1844 {
0b4f0116 1845 tree fndecl = gimple_call_fndecl (stmt);
153c3b50 1846 switch (DECL_FUNCTION_CODE (fndecl))
1847 {
1848 case BUILT_IN_MALLOC:
1849 case BUILT_IN_REALLOC:
1850 case BUILT_IN_CALLOC:
939514e9 1851 case BUILT_IN_STRDUP:
1852 case BUILT_IN_STRNDUP:
153c3b50 1853 val.lattice_val = CONSTANT;
1854 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1855 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1856 / BITS_PER_UNIT - 1);
153c3b50 1857 break;
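	      /* E.g. with a 16-byte malloc ABI alignment this is
		 <value 0, mask ~15>: the low four bits of the returned
		 pointer are known zero (an assumed example).  */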
1858
1859 case BUILT_IN_ALLOCA:
581bf1c2 1860 case BUILT_IN_ALLOCA_WITH_ALIGN:
1861 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
f9ae6f95 1862 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
581bf1c2 1863 : BIGGEST_ALIGNMENT);
153c3b50 1864 val.lattice_val = CONSTANT;
1865 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1866 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
153c3b50 1867 break;
1868
939514e9 1869 /* These builtins return their first argument, unmodified. */
1870 case BUILT_IN_MEMCPY:
1871 case BUILT_IN_MEMMOVE:
1872 case BUILT_IN_MEMSET:
1873 case BUILT_IN_STRCPY:
1874 case BUILT_IN_STRNCPY:
1875 case BUILT_IN_MEMCPY_CHK:
1876 case BUILT_IN_MEMMOVE_CHK:
1877 case BUILT_IN_MEMSET_CHK:
1878 case BUILT_IN_STRCPY_CHK:
1879 case BUILT_IN_STRNCPY_CHK:
1880 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1881 break;
1882
fca0886c 1883 case BUILT_IN_ASSUME_ALIGNED:
237e78b1 1884 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
fca0886c 1885 break;
1886
060fc206 1887 case BUILT_IN_ALIGNED_ALLOC:
1888 {
1889 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1890 if (align
1891 && tree_fits_uhwi_p (align))
1892 {
1893 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1894 if (aligni > 1
 1895 /* align must be a power of two */
1896 && (aligni & (aligni - 1)) == 0)
1897 {
1898 val.lattice_val = CONSTANT;
1899 val.value = build_int_cst (ptr_type_node, 0);
cd89b631 1900 val.mask = -aligni;
060fc206 1901 }
1902 }
1903 break;
1904 }
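	      /* E.g. aligned_alloc (64, n) with a constant first argument
		 yields <value 0, mask -64>: the low six bits of the
		 result are known zero (an assumed example).  */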
1905
153c3b50 1906 default:;
1907 }
1908 }
237e78b1 1909 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1910 {
1911 tree fntype = gimple_call_fntype (stmt);
1912 if (fntype)
1913 {
1914 tree attrs = lookup_attribute ("assume_aligned",
1915 TYPE_ATTRIBUTES (fntype));
1916 if (attrs)
1917 val = bit_value_assume_aligned (stmt, attrs, val, false);
1918 attrs = lookup_attribute ("alloc_align",
1919 TYPE_ATTRIBUTES (fntype));
1920 if (attrs)
1921 val = bit_value_assume_aligned (stmt, attrs, val, true);
1922 }
1923 }
b7e55469 1924 is_constant = (val.lattice_val == CONSTANT);
912f109f 1925 }
1926
fc08b993 1927 if (flag_tree_bit_ccp
1928 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
4c9206f2 1929 || !is_constant)
fc08b993 1930 && gimple_get_lhs (stmt)
1931 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1932 {
1933 tree lhs = gimple_get_lhs (stmt);
9c1be15e 1934 wide_int nonzero_bits = get_nonzero_bits (lhs);
1935 if (nonzero_bits != -1)
fc08b993 1936 {
1937 if (!is_constant)
1938 {
1939 val.lattice_val = CONSTANT;
1940 val.value = build_zero_cst (TREE_TYPE (lhs));
9c1be15e 1941 val.mask = extend_mask (nonzero_bits);
fc08b993 1942 is_constant = true;
1943 }
1944 else
1945 {
9c1be15e 1946 if (wi::bit_and_not (val.value, nonzero_bits) != 0)
aeb682a2 1947 val.value = wide_int_to_tree (TREE_TYPE (lhs),
9c1be15e 1948 nonzero_bits & val.value);
aeb682a2 1949 if (nonzero_bits == 0)
1950 val.mask = 0;
fc08b993 1951 else
09b2913a 1952 val.mask = val.mask & extend_mask (nonzero_bits);
fc08b993 1953 }
1954 }
1955 }
1956
4c9206f2 1957 /* The statement produced a nonconstant value. */
b7e55469 1958 if (!is_constant)
4ee9c684 1959 {
fc6cc27b 1960 /* The statement produced a copy. */
1961 if (simplified && TREE_CODE (simplified) == SSA_NAME
1962 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
1963 {
1964 val.lattice_val = CONSTANT;
1965 val.value = simplified;
1966 val.mask = -1;
1967 }
1968 /* The statement is VARYING. */
1969 else
1970 {
1971 val.lattice_val = VARYING;
1972 val.value = NULL_TREE;
1973 val.mask = -1;
1974 }
4ee9c684 1975 }
41511585 1976
1977 return val;
4ee9c684 1978}
1979
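/* A standalone sketch (hypothetical example, not part of this file) of
   the nonzero-bits refinement above.  If get_nonzero_bits recorded
   0xff for Y, an otherwise varying Y narrows to <value 0, mask 0xff>,
   so the comparison below folds even though Y is not constant.  */

unsigned
nonzero_bits_example (unsigned x)
{
  unsigned y = x & 0xff;	/* nonzero bits of y: 0xff */
  return (y >> 8) == 0;		/* foldable to 1: high bits known zero */
}
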
770ff93b 1980typedef hash_table<nofree_ptr_hash<gimple_statement_base> > gimple_htab;
2b15d2ba 1981
582a80ed 1982/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1983 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1984
1985static void
2b15d2ba 1986insert_clobber_before_stack_restore (tree saved_val, tree var,
c1f445d2 1987 gimple_htab **visited)
582a80ed 1988{
1a91d914 1989 gimple stmt;
1990 gassign *clobber_stmt;
582a80ed 1991 tree clobber;
1992 imm_use_iterator iter;
1993 gimple_stmt_iterator i;
1994 gimple *slot;
1995
1996 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1997 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1998 {
f1f41a6c 1999 clobber = build_constructor (TREE_TYPE (var),
2000 NULL);
582a80ed 2001 TREE_THIS_VOLATILE (clobber) = 1;
2002 clobber_stmt = gimple_build_assign (var, clobber);
2003
2004 i = gsi_for_stmt (stmt);
2005 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
2006 }
2007 else if (gimple_code (stmt) == GIMPLE_PHI)
2008 {
c1f445d2 2009 if (!*visited)
2010 *visited = new gimple_htab (10);
582a80ed 2011
c1f445d2 2012 slot = (*visited)->find_slot (stmt, INSERT);
582a80ed 2013 if (*slot != NULL)
2014 continue;
2015
2016 *slot = stmt;
2017 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
2018 visited);
2019 }
42eed683 2020 else if (gimple_assign_ssa_name_copy_p (stmt))
2021 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
2022 visited);
058a1b7a 2023 else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
2024 continue;
582a80ed 2025 else
2026 gcc_assert (is_gimple_debug (stmt));
2027}
2028
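/* An illustrative sketch, in GIMPLE dump syntax, of the effect of the
   function above (an assumed example): each matching restore

     __builtin_stack_restore (saved_1);

   becomes

     var ={v} {CLOBBER};
     __builtin_stack_restore (saved_1);

   marking the folded alloca's storage as dead from that point on.  */
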
2029/* Advance the iterator to the previous non-debug gimple statement in the same
2030 or dominating basic block. */
2031
2032static inline void
2033gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
2034{
2035 basic_block dom;
2036
2037 gsi_prev_nondebug (i);
2038 while (gsi_end_p (*i))
2039 {
2040 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
34154e27 2041 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
582a80ed 2042 return;
2043
2044 *i = gsi_last_bb (dom);
2045 }
2046}
2047
2048/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1543f720 2049 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2050
 2051 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
2052 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
2053 that case the function gives up without inserting the clobbers. */
582a80ed 2054
2055static void
2056insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2057{
582a80ed 2058 gimple stmt;
2059 tree saved_val;
c1f445d2 2060 gimple_htab *visited = NULL;
582a80ed 2061
1543f720 2062 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
582a80ed 2063 {
2064 stmt = gsi_stmt (i);
2065
2066 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2067 continue;
582a80ed 2068
2069 saved_val = gimple_call_lhs (stmt);
2070 if (saved_val == NULL_TREE)
2071 continue;
2072
2073 insert_clobber_before_stack_restore (saved_val, var, &visited);
2074 break;
2075 }
2076
c1f445d2 2077 delete visited;
582a80ed 2078}
2079
581bf1c2 2080/* Detects a __builtin_alloca_with_align with a constant size argument.
 2081 If found, declares a fixed-size array and returns its address;
 2082 otherwise returns NULL_TREE. */
9a65cc0a 2083
2084static tree
581bf1c2 2085fold_builtin_alloca_with_align (gimple stmt)
9a65cc0a 2086{
2087 unsigned HOST_WIDE_INT size, threshold, n_elem;
2088 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 2089
2090 /* Get lhs. */
2091 lhs = gimple_call_lhs (stmt);
2092 if (lhs == NULL_TREE)
2093 return NULL_TREE;
2094
2095 /* Detect constant argument. */
2096 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 2097 if (arg == NULL_TREE
2098 || TREE_CODE (arg) != INTEGER_CST
e913b5cd 2099 || !tree_fits_uhwi_p (arg))
9a65cc0a 2100 return NULL_TREE;
6e93d308 2101
8c53c46c 2102 size = tree_to_uhwi (arg);
9a65cc0a 2103
581bf1c2 2104 /* Heuristic: don't fold large allocas. */
9a65cc0a 2105 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 2106 /* If the alloca is located at function entry, it has the same lifetime
2107 as a declared array, so we allow a larger size. */
9a65cc0a 2108 block = gimple_block (stmt);
2109 if (!(cfun->after_inlining
2110 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2111 threshold /= 10;
2112 if (size > threshold)
2113 return NULL_TREE;
2114
2115 /* Declare array. */
2116 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2117 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 2118 array_type = build_array_type_nelts (elem_type, n_elem);
f9e245b2 2119 var = create_tmp_var (array_type);
f9ae6f95 2120 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
3d4a0a4b 2121 {
2122 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2123 if (pi != NULL && !pi->pt.anything)
2124 {
2125 bool singleton_p;
2126 unsigned uid;
2127 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
2128 gcc_assert (singleton_p);
2129 SET_DECL_PT_UID (var, uid);
2130 }
2131 }
9a65cc0a 2132
2133 /* Fold alloca to the address of the array. */
2134 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2135}
2136
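/* A standalone sketch (hypothetical example, not part of this file):
   with a small constant size, the call below is folded into the
   address of a fixed 16-byte local array whose DECL_ALIGN is taken
   from the second argument, which is given in bits (64 bits = 8
   bytes here).  */

extern void use_block (void *);

void
alloca_fold_example (void)
{
  void *p = __builtin_alloca_with_align (16, 64);
  use_block (p);
}
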
6688f8ec 2137/* Fold the stmt at *GSI with CCP specific information that propagating
2138 and regular folding does not catch. */
2139
2140static bool
2141ccp_fold_stmt (gimple_stmt_iterator *gsi)
2142{
2143 gimple stmt = gsi_stmt (*gsi);
6688f8ec 2144
94144e68 2145 switch (gimple_code (stmt))
2146 {
2147 case GIMPLE_COND:
2148 {
1a91d914 2149 gcond *cond_stmt = as_a <gcond *> (stmt);
9908fe4d 2150 ccp_prop_value_t val;
94144e68 2151 /* Statement evaluation will handle type mismatches in constants
2152 more gracefully than the final propagation. This allows us to
2153 fold more conditionals here. */
2154 val = evaluate_stmt (stmt);
2155 if (val.lattice_val != CONSTANT
796b6678 2156 || val.mask != 0)
94144e68 2157 return false;
2158
b7e55469 2159 if (dump_file)
2160 {
2161 fprintf (dump_file, "Folding predicate ");
2162 print_gimple_expr (dump_file, stmt, 0, 0);
2163 fprintf (dump_file, " to ");
2164 print_generic_expr (dump_file, val.value, 0);
2165 fprintf (dump_file, "\n");
2166 }
2167
94144e68 2168 if (integer_zerop (val.value))
1a91d914 2169 gimple_cond_make_false (cond_stmt);
94144e68 2170 else
1a91d914 2171 gimple_cond_make_true (cond_stmt);
6688f8ec 2172
94144e68 2173 return true;
2174 }
6688f8ec 2175
94144e68 2176 case GIMPLE_CALL:
2177 {
2178 tree lhs = gimple_call_lhs (stmt);
3064bb7b 2179 int flags = gimple_call_flags (stmt);
15d138c9 2180 tree val;
94144e68 2181 tree argt;
2182 bool changed = false;
2183 unsigned i;
2184
 2185 /* If the call was folded into a constant, make sure it goes
2186 away even if we cannot propagate into all uses because of
2187 type issues. */
2188 if (lhs
2189 && TREE_CODE (lhs) == SSA_NAME
3064bb7b 2190 && (val = get_constant_value (lhs))
2191 /* Don't optimize away calls that have side-effects. */
2192 && (flags & (ECF_CONST|ECF_PURE)) != 0
2193 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
94144e68 2194 {
15d138c9 2195 tree new_rhs = unshare_expr (val);
338cce8f 2196 bool res;
94144e68 2197 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2198 TREE_TYPE (new_rhs)))
2199 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 2200 res = update_call_from_tree (gsi, new_rhs);
2201 gcc_assert (res);
94144e68 2202 return true;
2203 }
2204
fb049fba 2205 /* Internal calls provide no argument types, so the extra laxity
2206 for normal calls does not apply. */
2207 if (gimple_call_internal_p (stmt))
2208 return false;
2209
581bf1c2 2210 /* The heuristic of fold_builtin_alloca_with_align differs before and
 2211 after inlining, so we don't require the arg to have just been
 2212 propagated to a constant; it merely needs to be constant. */
2213 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 2214 {
581bf1c2 2215 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 2216 if (new_rhs)
2217 {
2218 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 2219 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 2220 gcc_assert (res);
582a80ed 2221 insert_clobbers_for_var (*gsi, var);
6e93d308 2222 return true;
2223 }
9a65cc0a 2224 }
2225
94144e68 2226 /* Propagate into the call arguments. Compared to replace_uses_in
2227 this can use the argument slot types for type verification
2228 instead of the current argument type. We also can safely
2229 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 2230 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 2231 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2232 ++i, argt = TREE_CHAIN (argt))
2233 {
2234 tree arg = gimple_call_arg (stmt, i);
2235 if (TREE_CODE (arg) == SSA_NAME
15d138c9 2236 && (val = get_constant_value (arg))
94144e68 2237 && useless_type_conversion_p
2238 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 2239 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 2240 {
15d138c9 2241 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 2242 changed = true;
2243 }
2244 }
e16f4c39 2245
94144e68 2246 return changed;
2247 }
6688f8ec 2248
6872bf3c 2249 case GIMPLE_ASSIGN:
2250 {
2251 tree lhs = gimple_assign_lhs (stmt);
15d138c9 2252 tree val;
6872bf3c 2253
2254 /* If we have a load that turned out to be constant replace it
2255 as we cannot propagate into all uses in all cases. */
2256 if (gimple_assign_single_p (stmt)
2257 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2258 && (val = get_constant_value (lhs)))
6872bf3c 2259 {
15d138c9 2260 tree rhs = unshare_expr (val);
6872bf3c 2261 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 2262 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 2263 gimple_assign_set_rhs_from_tree (gsi, rhs);
2264 return true;
2265 }
2266
2267 return false;
2268 }
2269
94144e68 2270 default:
2271 return false;
2272 }
6688f8ec 2273}
2274
41511585 2275/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 2276 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2277 creates virtual definitions, set the value of each new name to that
75a70cf9 2278 of the RHS (if we can derive a constant out of the RHS).
2279 Value-returning call statements also perform an assignment, and
2280 are handled here. */
4ee9c684 2281
41511585 2282static enum ssa_prop_result
75a70cf9 2283visit_assignment (gimple stmt, tree *output_p)
4ee9c684 2284{
9908fe4d 2285 ccp_prop_value_t val;
fc6cc27b 2286 enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2287
75a70cf9 2288 tree lhs = gimple_get_lhs (stmt);
88dbf20f 2289 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2290 {
fc6cc27b 2291 /* Evaluate the statement, which could be
2292 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2293 val = evaluate_stmt (stmt);
2294
88dbf20f 2295 /* If STMT is an assignment to an SSA_NAME, we only have one
2296 value to set. */
27de3d43 2297 if (set_lattice_value (lhs, &val))
88dbf20f 2298 {
2299 *output_p = lhs;
2300 if (val.lattice_val == VARYING)
2301 retval = SSA_PROP_VARYING;
2302 else
2303 retval = SSA_PROP_INTERESTING;
2304 }
4ee9c684 2305 }
88dbf20f 2306
2307 return retval;
4ee9c684 2308}
2309
4ee9c684 2310
41511585 2311/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2312 if it can determine which edge will be taken. Otherwise, return
2313 SSA_PROP_VARYING. */
2314
2315static enum ssa_prop_result
75a70cf9 2316visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2317{
9908fe4d 2318 ccp_prop_value_t val;
41511585 2319 basic_block block;
2320
75a70cf9 2321 block = gimple_bb (stmt);
41511585 2322 val = evaluate_stmt (stmt);
b7e55469 2323 if (val.lattice_val != CONSTANT
796b6678 2324 || val.mask != 0)
b7e55469 2325 return SSA_PROP_VARYING;
41511585 2326
2327 /* Find which edge out of the conditional block will be taken and add it
2328 to the worklist. If no single edge can be determined statically,
2329 return SSA_PROP_VARYING to feed all the outgoing edges to the
2330 propagation engine. */
b7e55469 2331 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2332 if (*taken_edge_p)
2333 return SSA_PROP_INTERESTING;
2334 else
2335 return SSA_PROP_VARYING;
4ee9c684 2336}
2337
4ee9c684 2338
41511585 2339/* Evaluate statement STMT. If the statement produces an output value and
2340 its evaluation changes the lattice value of its output, return
2341 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2342 output value.
48e1416a 2343
41511585 2344 If STMT is a conditional branch and we can determine its truth
2345 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2346 value, return SSA_PROP_VARYING. */
4ee9c684 2347
41511585 2348static enum ssa_prop_result
75a70cf9 2349ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2350{
41511585 2351 tree def;
2352 ssa_op_iter iter;
4ee9c684 2353
41511585 2354 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2355 {
88dbf20f 2356 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2357 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2358 }
4ee9c684 2359
75a70cf9 2360 switch (gimple_code (stmt))
4ee9c684 2361 {
75a70cf9 2362 case GIMPLE_ASSIGN:
2363 /* If the statement is an assignment that produces a single
2364 output value, evaluate its RHS to see if the lattice value of
2365 its output has changed. */
2366 return visit_assignment (stmt, output_p);
2367
2368 case GIMPLE_CALL:
2369 /* A value-returning call also performs an assignment. */
2370 if (gimple_call_lhs (stmt) != NULL_TREE)
2371 return visit_assignment (stmt, output_p);
2372 break;
2373
2374 case GIMPLE_COND:
2375 case GIMPLE_SWITCH:
2376 /* If STMT is a conditional branch, see if we can determine
2377 which branch will be taken. */
2378 /* FIXME. It appears that we should be able to optimize
2379 computed GOTOs here as well. */
2380 return visit_cond_stmt (stmt, taken_edge_p);
2381
2382 default:
2383 break;
4ee9c684 2384 }
4ee9c684 2385
41511585 2386 /* Any other kind of statement is not interesting for constant
2387 propagation and, therefore, not worth simulating. */
41511585 2388 if (dump_file && (dump_flags & TDF_DETAILS))
2389 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2390
41511585 2391 /* Definitions made by statements other than assignments to
2392 SSA_NAMEs represent unknown modifications to their outputs.
2393 Mark them VARYING. */
88dbf20f 2394 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
27de3d43 2395 set_value_varying (def);
4ee9c684 2396
41511585 2397 return SSA_PROP_VARYING;
2398}
4ee9c684 2399
4ee9c684 2400
88dbf20f 2401/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2402
33a34f1e 2403static unsigned int
61207d43 2404do_ssa_ccp (void)
41511585 2405{
582a80ed 2406 unsigned int todo = 0;
2407 calculate_dominance_info (CDI_DOMINATORS);
41511585 2408 ccp_initialize ();
2409 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2410 if (ccp_finalize ())
560965e9 2411 todo = (TODO_cleanup_cfg | TODO_update_ssa);
582a80ed 2412 free_dominance_info (CDI_DOMINATORS);
2413 return todo;
4ee9c684 2414}
2415
5664499b 2416
cbe8bda8 2417namespace {
2418
2419const pass_data pass_data_ccp =
41511585 2420{
cbe8bda8 2421 GIMPLE_PASS, /* type */
2422 "ccp", /* name */
2423 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2424 TV_TREE_CCP, /* tv_id */
2425 ( PROP_cfg | PROP_ssa ), /* properties_required */
2426 0, /* properties_provided */
2427 0, /* properties_destroyed */
2428 0, /* todo_flags_start */
8b88439e 2429 TODO_update_address_taken, /* todo_flags_finish */
41511585 2430};
4ee9c684 2431
cbe8bda8 2432class pass_ccp : public gimple_opt_pass
2433{
2434public:
9af5ce0c 2435 pass_ccp (gcc::context *ctxt)
2436 : gimple_opt_pass (pass_data_ccp, ctxt)
cbe8bda8 2437 {}
2438
2439 /* opt_pass methods: */
ae84f584 2440 opt_pass * clone () { return new pass_ccp (m_ctxt); }
31315c24 2441 virtual bool gate (function *) { return flag_tree_ccp != 0; }
65b0537f 2442 virtual unsigned int execute (function *) { return do_ssa_ccp (); }
cbe8bda8 2443
2444}; // class pass_ccp
2445
2446} // anon namespace
2447
2448gimple_opt_pass *
2449make_pass_ccp (gcc::context *ctxt)
2450{
2451 return new pass_ccp (ctxt);
2452}
2453
4ee9c684 2454
75a70cf9 2455
bdd0e199 2456/* Try to optimize out __builtin_stack_restore. Optimize it out
2457 if there is another __builtin_stack_restore in the same basic
2458 block and no calls or ASM_EXPRs are in between, or if this block's
2459 only outgoing edge is to EXIT_BLOCK and there are no calls or
2460 ASM_EXPRs after this __builtin_stack_restore. */
2461
2462static tree
75a70cf9 2463optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2464{
6ea999da 2465 tree callee;
2466 gimple stmt;
75a70cf9 2467
2468 basic_block bb = gsi_bb (i);
2469 gimple call = gsi_stmt (i);
bdd0e199 2470
75a70cf9 2471 if (gimple_code (call) != GIMPLE_CALL
2472 || gimple_call_num_args (call) != 1
2473 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2474 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2475 return NULL_TREE;
2476
75a70cf9 2477 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2478 {
75a70cf9 2479 stmt = gsi_stmt (i);
2480 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2481 return NULL_TREE;
75a70cf9 2482 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2483 continue;
2484
75a70cf9 2485 callee = gimple_call_fndecl (stmt);
c40a6f90 2486 if (!callee
2487 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2488 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2489 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2490 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2491 return NULL_TREE;
2492
2493 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2494 goto second_stack_restore;
bdd0e199 2495 }
2496
6ea999da 2497 if (!gsi_end_p (i))
bdd0e199 2498 return NULL_TREE;
2499
6ea999da 2500 /* Allow a single successor edge to the exit block, or no successors. */
2501 switch (EDGE_COUNT (bb->succs))
2502 {
2503 case 0:
2504 break;
2505 case 1:
34154e27 2506 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
6ea999da 2507 return NULL_TREE;
2508 break;
2509 default:
2510 return NULL_TREE;
2511 }
2512 second_stack_restore:
bdd0e199 2513
6ea999da 2514 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2515 If there are multiple uses, then the last one should remove the call.
2516 In any case, whether the call to __builtin_stack_save can be removed
2517 or not is irrelevant to removing the call to __builtin_stack_restore. */
2518 if (has_single_use (gimple_call_arg (call, 0)))
2519 {
2520 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2521 if (is_gimple_call (stack_save))
2522 {
2523 callee = gimple_call_fndecl (stack_save);
2524 if (callee
2525 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2526 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2527 {
2528 gimple_stmt_iterator stack_save_gsi;
2529 tree rhs;
bdd0e199 2530
6ea999da 2531 stack_save_gsi = gsi_for_stmt (stack_save);
2532 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2533 update_call_from_tree (&stack_save_gsi, rhs);
2534 }
2535 }
2536 }
bdd0e199 2537
75a70cf9 2538 /* No effect, so the statement will be deleted. */
bdd0e199 2539 return integer_zero_node;
2540}
75a70cf9 2541
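/* A standalone sketch (hypothetical example, not part of this file):
   the first restore below is removable because another restore follows
   in the same block with no call or asm in between.  User code rarely
   writes these builtins directly; they normally come from VLA and
   alloca lowering.  */

void
stack_restore_example (void)
{
  void *sp = __builtin_stack_save ();
  __builtin_stack_restore (sp);	/* redundant: a second restore follows */
  __builtin_stack_restore (sp);
}
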
8a58ed0a 2542/* If va_list type is a simple pointer and nothing special is needed,
2543 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2544 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
2545 pointer assignment. */
2546
2547static tree
75a70cf9 2548optimize_stdarg_builtin (gimple call)
8a58ed0a 2549{
5f57a8b1 2550 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2551 bool va_list_simple_ptr;
389dd41b 2552 location_t loc = gimple_location (call);
8a58ed0a 2553
75a70cf9 2554 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2555 return NULL_TREE;
2556
75a70cf9 2557 callee = gimple_call_fndecl (call);
5f57a8b1 2558
2559 cfun_va_list = targetm.fn_abi_va_list (callee);
2560 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2561 && (TREE_TYPE (cfun_va_list) == void_type_node
2562 || TREE_TYPE (cfun_va_list) == char_type_node);
2563
8a58ed0a 2564 switch (DECL_FUNCTION_CODE (callee))
2565 {
2566 case BUILT_IN_VA_START:
2567 if (!va_list_simple_ptr
2568 || targetm.expand_builtin_va_start != NULL
e7ed5dd7 2569 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2570 return NULL_TREE;
2571
75a70cf9 2572 if (gimple_call_num_args (call) != 2)
8a58ed0a 2573 return NULL_TREE;
2574
75a70cf9 2575 lhs = gimple_call_arg (call, 0);
8a58ed0a 2576 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2577 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2578 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2579 return NULL_TREE;
48e1416a 2580
389dd41b 2581 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2582 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2583 1, integer_zero_node);
389dd41b 2584 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2585 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2586
2587 case BUILT_IN_VA_COPY:
2588 if (!va_list_simple_ptr)
2589 return NULL_TREE;
2590
75a70cf9 2591 if (gimple_call_num_args (call) != 2)
8a58ed0a 2592 return NULL_TREE;
2593
75a70cf9 2594 lhs = gimple_call_arg (call, 0);
8a58ed0a 2595 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2596 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2597 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2598 return NULL_TREE;
2599
389dd41b 2600 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2601 rhs = gimple_call_arg (call, 1);
8a58ed0a 2602 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2603 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2604 return NULL_TREE;
2605
389dd41b 2606 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2607 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2608
2609 case BUILT_IN_VA_END:
75a70cf9 2610 /* No effect, so the statement will be deleted. */
8a58ed0a 2611 return integer_zero_node;
2612
2613 default:
2614 gcc_unreachable ();
2615 }
2616}
75a70cf9 2617
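/* A standalone sketch (hypothetical example, not part of this file),
   for a target whose va_list is a plain pointer: the va_start below
   may be rewritten to ap = __builtin_next_arg (0), and the va_end is
   deleted outright, as the folder returns integer_zero_node for it.  */

#include <stdarg.h>

int
first_vararg (int n, ...)
{
  va_list ap;
  int v;

  va_start (ap, n);
  v = va_arg (ap, int);
  va_end (ap);
  return v;
}
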
f87df69a 2618/* Attempt to make the block of the __builtin_unreachable call I unreachable
 2619 by changing the incoming jumps. Return true if at least one jump was changed. */
2620
2621static bool
2622optimize_unreachable (gimple_stmt_iterator i)
2623{
2624 basic_block bb = gsi_bb (i);
2625 gimple_stmt_iterator gsi;
2626 gimple stmt;
2627 edge_iterator ei;
2628 edge e;
2629 bool ret;
2630
f6b540af 2631 if (flag_sanitize & SANITIZE_UNREACHABLE)
2632 return false;
2633
f87df69a 2634 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2635 {
2636 stmt = gsi_stmt (gsi);
2637
2638 if (is_gimple_debug (stmt))
2639 continue;
2640
1a91d914 2641 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
f87df69a 2642 {
2643 /* Verify we do not need to preserve the label. */
1a91d914 2644 if (FORCED_LABEL (gimple_label_label (label_stmt)))
f87df69a 2645 return false;
2646
2647 continue;
2648 }
2649
2650 /* Only handle the case that __builtin_unreachable is the first statement
2651 in the block. We rely on DCE to remove stmts without side-effects
2652 before __builtin_unreachable. */
2653 if (gsi_stmt (gsi) != gsi_stmt (i))
2654 return false;
2655 }
2656
2657 ret = false;
2658 FOR_EACH_EDGE (e, ei, bb->preds)
2659 {
2660 gsi = gsi_last_bb (e->src);
522f73a1 2661 if (gsi_end_p (gsi))
2662 continue;
f87df69a 2663
522f73a1 2664 stmt = gsi_stmt (gsi);
1a91d914 2665 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
f87df69a 2666 {
2667 if (e->flags & EDGE_TRUE_VALUE)
1a91d914 2668 gimple_cond_make_false (cond_stmt);
f87df69a 2669 else if (e->flags & EDGE_FALSE_VALUE)
1a91d914 2670 gimple_cond_make_true (cond_stmt);
f87df69a 2671 else
2672 gcc_unreachable ();
1a91d914 2673 update_stmt (cond_stmt);
f87df69a 2674 }
2675 else
2676 {
 2677 /* TODO: handle other cases, e.g. a switch statement. */
2678 continue;
2679 }
2680
2681 ret = true;
2682 }
2683
2684 return ret;
2685}
2686
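/* A standalone sketch (hypothetical example, not part of this file),
   applicable when -fsanitize=unreachable is off: the conditional
   feeding the unreachable block is forced the other way, so the
   if (x > 10) edge becomes statically false and the block dies.  */

int
unreachable_example (int x)
{
  if (x > 10)
    __builtin_unreachable ();	/* the true edge is made false */
  return x;
}
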
4ee9c684 2687/* A simple pass that attempts to fold all builtin functions. This pass
2688 is run after we've propagated as many constants as we can. */
2689
65b0537f 2690namespace {
2691
2692const pass_data pass_data_fold_builtins =
2693{
2694 GIMPLE_PASS, /* type */
2695 "fab", /* name */
2696 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 2697 TV_NONE, /* tv_id */
2698 ( PROP_cfg | PROP_ssa ), /* properties_required */
2699 0, /* properties_provided */
2700 0, /* properties_destroyed */
2701 0, /* todo_flags_start */
8b88439e 2702 TODO_update_ssa, /* todo_flags_finish */
65b0537f 2703};
2704
2705class pass_fold_builtins : public gimple_opt_pass
2706{
2707public:
2708 pass_fold_builtins (gcc::context *ctxt)
2709 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
2710 {}
2711
2712 /* opt_pass methods: */
2713 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
2714 virtual unsigned int execute (function *);
2715
2716}; // class pass_fold_builtins
2717
2718unsigned int
2719pass_fold_builtins::execute (function *fun)
4ee9c684 2720{
b36237eb 2721 bool cfg_changed = false;
4ee9c684 2722 basic_block bb;
b1b7c0c4 2723 unsigned int todoflags = 0;
48e1416a 2724
65b0537f 2725 FOR_EACH_BB_FN (bb, fun)
4ee9c684 2726 {
75a70cf9 2727 gimple_stmt_iterator i;
2728 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2729 {
75a70cf9 2730 gimple stmt, old_stmt;
40a3eb84 2731 tree callee;
0a39fd54 2732 enum built_in_function fcode;
4ee9c684 2733
75a70cf9 2734 stmt = gsi_stmt (i);
2735
2736 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2737 {
896a0c42 2738 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts:
2739 after the last GIMPLE DSE they aren't needed and might
2740 unnecessarily keep the SSA_NAMEs live. */
2741 if (gimple_clobber_p (stmt))
2742 {
2743 tree lhs = gimple_assign_lhs (stmt);
2744 if (TREE_CODE (lhs) == MEM_REF
2745 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2746 {
2747 unlink_stmt_vdef (stmt);
2748 gsi_remove (&i, true);
2749 release_defs (stmt);
2750 continue;
2751 }
2752 }
75a70cf9 2753 gsi_next (&i);
0a39fd54 2754 continue;
2755 }
40a3eb84 2756
75a70cf9 2757 callee = gimple_call_fndecl (stmt);
4ee9c684 2758 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2759 {
75a70cf9 2760 gsi_next (&i);
0a39fd54 2761 continue;
2762 }
5a4b7e1e 2763
40a3eb84 2764 fcode = DECL_FUNCTION_CODE (callee);
2765 if (fold_stmt (&i))
2766 ;
2767 else
2768 {
2769 tree result = NULL_TREE;
2770 switch (DECL_FUNCTION_CODE (callee))
2771 {
2772 case BUILT_IN_CONSTANT_P:
2773 /* Resolve __builtin_constant_p. If it hasn't been
2774 folded to integer_one_node by now, it's fairly
2775 certain that the value simply isn't constant. */
2776 result = integer_zero_node;
2777 break;
5a4b7e1e 2778
40a3eb84 2779 case BUILT_IN_ASSUME_ALIGNED:
2780 /* Remove __builtin_assume_aligned. */
2781 result = gimple_call_arg (stmt, 0);
2782 break;
4ee9c684 2783
40a3eb84 2784 case BUILT_IN_STACK_RESTORE:
2785 result = optimize_stack_restore (i);
2786 if (result)
2787 break;
2788 gsi_next (&i);
2789 continue;
fca0886c 2790
40a3eb84 2791 case BUILT_IN_UNREACHABLE:
2792 if (optimize_unreachable (i))
2793 cfg_changed = true;
8a58ed0a 2794 break;
8a58ed0a 2795
40a3eb84 2796 case BUILT_IN_VA_START:
2797 case BUILT_IN_VA_END:
2798 case BUILT_IN_VA_COPY:
2799 /* These shouldn't be folded before pass_stdarg. */
2800 result = optimize_stdarg_builtin (stmt);
2801 if (result)
2802 break;
2803 /* FALLTHRU */
f87df69a 2804
40a3eb84 2805 default:;
2806 }
bdd0e199 2807
40a3eb84 2808 if (!result)
2809 {
2810 gsi_next (&i);
2811 continue;
2812 }
4ee9c684 2813
40a3eb84 2814 if (!update_call_from_tree (&i, result))
2815 gimplify_and_update_call_from_tree (&i, result);
2816 }
2817
2818 todoflags |= TODO_update_address_taken;
f87df69a 2819
4ee9c684 2820 if (dump_file && (dump_flags & TDF_DETAILS))
2821 {
2822 fprintf (dump_file, "Simplified\n ");
75a70cf9 2823 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2824 }
2825
75a70cf9 2826 old_stmt = stmt;
75a70cf9 2827 stmt = gsi_stmt (i);
4c5fd53c 2828 update_stmt (stmt);
de6ed584 2829
75a70cf9 2830 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2831 && gimple_purge_dead_eh_edges (bb))
b36237eb 2832 cfg_changed = true;
4ee9c684 2833
2834 if (dump_file && (dump_flags & TDF_DETAILS))
2835 {
2836 fprintf (dump_file, "to\n ");
75a70cf9 2837 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2838 fprintf (dump_file, "\n");
2839 }
0a39fd54 2840
2841 /* Retry the same statement if it changed into another
 2842 builtin; there might be new opportunities now. */
75a70cf9 2843 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2844 {
75a70cf9 2845 gsi_next (&i);
0a39fd54 2846 continue;
2847 }
75a70cf9 2848 callee = gimple_call_fndecl (stmt);
0a39fd54 2849 if (!callee
75a70cf9 2850 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2851 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2852 gsi_next (&i);
4ee9c684 2853 }
2854 }
48e1416a 2855
b36237eb 2856 /* Delete unreachable blocks. */
b1b7c0c4 2857 if (cfg_changed)
2858 todoflags |= TODO_cleanup_cfg;
48e1416a 2859
b1b7c0c4 2860 return todoflags;
4ee9c684 2861}
2862
cbe8bda8 2863} // anon namespace
2864
2865gimple_opt_pass *
2866make_pass_fold_builtins (gcc::context *ctxt)
2867{
2868 return new pass_fold_builtins (ctxt);
2869}