4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
3aea1f79 2 Copyright (C) 2000-2014 Free Software Foundation, Inc.
4ee9c684 3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
48e1416a 7
4ee9c684 8GCC is free software; you can redistribute it and/or modify it
9under the terms of the GNU General Public License as published by the
8c4c00c1 10Free Software Foundation; either version 3, or (at your option) any
4ee9c684 11later version.
48e1416a 12
4ee9c684 13GCC is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
48e1416a 17
4ee9c684 18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
4ee9c684 21
88dbf20f 22/* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
29
bfa30570 30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
88dbf20f 37
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
42
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
45
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
49
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
51
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
 53 evaluates to a constant and conditional jumps whose predicate
 54 evaluates to a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
 57 propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
60
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
48e1416a 65
88dbf20f 66
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
 71 values as often as possible, it uses two main shortcuts:
72
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
75
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
81
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
 84 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
86
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
94
95
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
100
e913b5cd 101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
 107 but this causes no loss of information since these variables will
108 never be extended.
109
4ee9c684 110 References:
111
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
114
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
117
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
120
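/* A purely illustrative sketch of the pass in action (not part of
   the implementation): on a fragment such as

       x_1 = 4;
       if (x_1 > 2)
         y_2 = x_1 + 1;
       else
         y_3 = 0;
       y_4 = PHI <y_2, y_3>

   visiting "x_1 = 4" sets x_1 to CONSTANT 4, the predicate then
   folds to true so only the then-edge is marked executable, y_2
   becomes CONSTANT 5, and the meet over the single executable PHI
   argument leaves y_4 CONSTANT 5.  */
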
121#include "config.h"
122#include "system.h"
123#include "coretypes.h"
124#include "tm.h"
4ee9c684 125#include "tree.h"
9ed99284 126#include "stor-layout.h"
41511585 127#include "flags.h"
4ee9c684 128#include "tm_p.h"
94ea8568 129#include "predict.h"
130#include "vec.h"
a3020f2f 131#include "hashtab.h"
132#include "hash-set.h"
a3020f2f 133#include "machmode.h"
134#include "hard-reg-set.h"
135#include "input.h"
41511585 136#include "function.h"
94ea8568 137#include "dominance.h"
138#include "cfg.h"
139#include "basic-block.h"
ce084dfc 140#include "gimple-pretty-print.h"
bc61cadb 141#include "hash-table.h"
142#include "tree-ssa-alias.h"
143#include "internal-fn.h"
144#include "gimple-fold.h"
145#include "tree-eh.h"
146#include "gimple-expr.h"
147#include "is-a.h"
073c1fd5 148#include "gimple.h"
a8783bee 149#include "gimplify.h"
dcf1a1ec 150#include "gimple-iterator.h"
073c1fd5 151#include "gimple-ssa.h"
152#include "tree-cfg.h"
153#include "tree-phinodes.h"
154#include "ssa-iterators.h"
9ed99284 155#include "stringpool.h"
073c1fd5 156#include "tree-ssanames.h"
4ee9c684 157#include "tree-pass.h"
41511585 158#include "tree-ssa-propagate.h"
5a4b7e1e 159#include "value-prof.h"
41511585 160#include "langhooks.h"
8782adcf 161#include "target.h"
0b205f4c 162#include "diagnostic-core.h"
43fb76c1 163#include "dbgcnt.h"
9a65cc0a 164#include "params.h"
e913b5cd 165#include "wide-int-print.h"
f7715905 166#include "builtins.h"
058a1b7a 167#include "tree-chkp.h"
4ee9c684 168
2dc10fae 169
4ee9c684 170/* Possible lattice values. */
171typedef enum
172{
bfa30570 173 UNINITIALIZED,
4ee9c684 174 UNDEFINED,
175 CONSTANT,
176 VARYING
88dbf20f 177} ccp_lattice_t;
4ee9c684 178
9908fe4d 179struct ccp_prop_value_t {
14f101cf 180 /* Lattice value. */
181 ccp_lattice_t lattice_val;
182
183 /* Propagated value. */
184 tree value;
b7e55469 185
e913b5cd 186 /* Mask that applies to the propagated value during CCP. For a
 187 name X with a CONSTANT lattice value, X & ~mask == value & ~mask.
 188 Zero bits in the mask cover the known-constant bits; one bits
 189 carry no information. */
5de9d3ed 190 widest_int mask;
14f101cf 191};
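/* Illustrative reading of the MASK field above, with made-up
   numbers: value == 0x10 together with mask == 0x0f describes any
   X for which X & ~0x0f == 0x10, i.e. the constants 0x10 .. 0x1f;
   bit 4 is known to be set and the low four bits are unknown.  */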
192
88dbf20f 193/* Array of propagated constant values. After propagation,
 194 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). */
9908fe4d 199static ccp_prop_value_t *const_val;
285df01b 200static unsigned n_const_val;
4ee9c684 201
9908fe4d 202static void canonicalize_value (ccp_prop_value_t *);
6688f8ec 203static bool ccp_fold_stmt (gimple_stmt_iterator *);
4af351a8 204
88dbf20f 205/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 206
207static void
9908fe4d 208dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
01406fc0 209{
41511585 210 switch (val.lattice_val)
01406fc0 211 {
88dbf20f 212 case UNINITIALIZED:
213 fprintf (outf, "%sUNINITIALIZED", prefix);
214 break;
41511585 215 case UNDEFINED:
216 fprintf (outf, "%sUNDEFINED", prefix);
217 break;
218 case VARYING:
219 fprintf (outf, "%sVARYING", prefix);
220 break;
41511585 221 case CONSTANT:
b7e55469 222 if (TREE_CODE (val.value) != INTEGER_CST
796b6678 223 || val.mask == 0)
16ab4e97 224 {
225 fprintf (outf, "%sCONSTANT ", prefix);
226 print_generic_expr (outf, val.value, dump_flags);
227 }
b7e55469 228 else
229 {
28e557ef 230 widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
231 val.mask);
e913b5cd 232 fprintf (outf, "%sCONSTANT ", prefix);
233 print_hex (cval, outf);
234 fprintf (outf, " (");
235 print_hex (val.mask, outf);
236 fprintf (outf, ")");
b7e55469 237 }
41511585 238 break;
239 default:
8c0963c4 240 gcc_unreachable ();
41511585 241 }
01406fc0 242}
4ee9c684 243
4ee9c684 244
88dbf20f 245/* Print lattice value VAL to stderr. */
246
9908fe4d 247void debug_lattice_value (ccp_prop_value_t val);
88dbf20f 248
4b987fac 249DEBUG_FUNCTION void
9908fe4d 250debug_lattice_value (ccp_prop_value_t val)
88dbf20f 251{
252 dump_lattice_value (stderr, "", val);
253 fprintf (stderr, "\n");
254}
4ee9c684 255
9c1be15e 256/* Extend NONZERO_BITS to a full mask, with the upper bits being set. */
257
258static widest_int
259extend_mask (const wide_int &nonzero_bits)
260{
261 return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
262 | widest_int::from (nonzero_bits, UNSIGNED));
263}
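/* For example (illustrative): for an 8-bit quantity whose nonzero
   bits are 0x0f, the result is ~0xff | 0x0f at widest precision,
   so only bits 4..7 have their mask bit clear -- they are known to
   be zero -- while all other bits carry no information.  */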
4ee9c684 264
88dbf20f 265/* Compute a default value for variable VAR and store it in the
266 CONST_VAL array. The following rules are used to get default
267 values:
01406fc0 268
88dbf20f 269 1- Global and static variables that are declared constant are
270 considered CONSTANT.
271
272 2- Any other value is considered UNDEFINED. This is useful when
41511585 273 considering PHI nodes. PHI arguments that are undefined do not
274 change the constant value of the PHI node, which allows for more
88dbf20f 275 constants to be propagated.
4ee9c684 276
8883e700 277 3- Variables defined by statements other than assignments and PHI
88dbf20f 278 nodes are considered VARYING.
4ee9c684 279
8883e700 280 4- Initial values of variables that are not GIMPLE registers are
bfa30570 281 considered VARYING. */
4ee9c684 282
9908fe4d 283static ccp_prop_value_t
88dbf20f 284get_default_value (tree var)
285{
9908fe4d 286 ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
8edeb88b 287 gimple stmt;
288
289 stmt = SSA_NAME_DEF_STMT (var);
290
291 if (gimple_nop_p (stmt))
4ee9c684 292 {
8edeb88b 293 /* Variables defined by an empty statement are those used
294 before being initialized. If VAR is a local variable, we
295 can assume initially that it is UNDEFINED, otherwise we must
296 consider it VARYING. */
7c782c9b 297 if (!virtual_operand_p (var)
298 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
8edeb88b 299 val.lattice_val = UNDEFINED;
300 else
b7e55469 301 {
302 val.lattice_val = VARYING;
e913b5cd 303 val.mask = -1;
fc08b993 304 if (flag_tree_bit_ccp)
305 {
9c1be15e 306 wide_int nonzero_bits = get_nonzero_bits (var);
307 if (nonzero_bits != -1)
fc08b993 308 {
309 val.lattice_val = CONSTANT;
310 val.value = build_zero_cst (TREE_TYPE (var));
9c1be15e 311 val.mask = extend_mask (nonzero_bits);
fc08b993 312 }
313 }
b7e55469 314 }
4ee9c684 315 }
b45b214a 316 else if (is_gimple_assign (stmt))
41511585 317 {
8edeb88b 318 tree cst;
319 if (gimple_assign_single_p (stmt)
320 && DECL_P (gimple_assign_rhs1 (stmt))
321 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
88dbf20f 322 {
8edeb88b 323 val.lattice_val = CONSTANT;
324 val.value = cst;
88dbf20f 325 }
326 else
b45b214a 327 {
328 /* Any other variable defined by an assignment is considered
329 UNDEFINED. */
330 val.lattice_val = UNDEFINED;
331 }
332 }
333 else if ((is_gimple_call (stmt)
334 && gimple_call_lhs (stmt) != NULL_TREE)
335 || gimple_code (stmt) == GIMPLE_PHI)
336 {
337 /* A variable defined by a call or a PHI node is considered
338 UNDEFINED. */
339 val.lattice_val = UNDEFINED;
8edeb88b 340 }
341 else
342 {
343 /* Otherwise, VAR will never take on a constant value. */
344 val.lattice_val = VARYING;
e913b5cd 345 val.mask = -1;
41511585 346 }
4ee9c684 347
41511585 348 return val;
349}
4ee9c684 350
4ee9c684 351
bfa30570 352/* Get the constant value associated with variable VAR. */
4ee9c684 353
9908fe4d 354static inline ccp_prop_value_t *
bfa30570 355get_value (tree var)
88dbf20f 356{
9908fe4d 357 ccp_prop_value_t *val;
bfa30570 358
285df01b 359 if (const_val == NULL
360 || SSA_NAME_VERSION (var) >= n_const_val)
e004838d 361 return NULL;
362
363 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 364 if (val->lattice_val == UNINITIALIZED)
4ee9c684 365 *val = get_default_value (var);
366
f5faab84 367 canonicalize_value (val);
4af351a8 368
4ee9c684 369 return val;
370}
371
15d138c9 372/* Return the constant tree value associated with VAR. */
373
374static inline tree
375get_constant_value (tree var)
376{
9908fe4d 377 ccp_prop_value_t *val;
98d92e3c 378 if (TREE_CODE (var) != SSA_NAME)
379 {
380 if (is_gimple_min_invariant (var))
381 return var;
382 return NULL_TREE;
383 }
384 val = get_value (var);
b7e55469 385 if (val
386 && val->lattice_val == CONSTANT
387 && (TREE_CODE (val->value) != INTEGER_CST
796b6678 388 || val->mask == 0))
15d138c9 389 return val->value;
390 return NULL_TREE;
391}
392
bfa30570 393/* Sets the value associated with VAR to VARYING. */
394
395static inline void
396set_value_varying (tree var)
397{
9908fe4d 398 ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 399
400 val->lattice_val = VARYING;
401 val->value = NULL_TREE;
e913b5cd 402 val->mask = -1;
bfa30570 403}
4ee9c684 404
b31eb493 405/* For float types, modify the value of VAL to make ccp work correctly
406 for non-standard values (-0, NaN):
407
408 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
409 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
410 This is to fix the following problem (see PR 29921): Suppose we have
411
412 x = 0.0 * y
413
 414 and we set the value of y to NaN. This causes the value of x to be set to NaN.
415 When we later determine that y is in fact VARYING, fold uses the fact
416 that HONOR_NANS is false, and we try to change the value of x to 0,
417 causing an ICE. With HONOR_NANS being false, the real appearance of
418 NaN would cause undefined behavior, though, so claiming that y (and x)
f5faab84 419 are UNDEFINED initially is correct.
420
421 For other constants, make sure to drop TREE_OVERFLOW. */
b31eb493 422
423static void
9908fe4d 424canonicalize_value (ccp_prop_value_t *val)
b31eb493 425{
3754d046 426 machine_mode mode;
b31eb493 427 tree type;
428 REAL_VALUE_TYPE d;
429
f5faab84 430 if (val->lattice_val != CONSTANT)
431 return;
432
433 if (TREE_OVERFLOW_P (val->value))
434 val->value = drop_tree_overflow (val->value);
435
436 if (TREE_CODE (val->value) != REAL_CST)
b31eb493 437 return;
438
439 d = TREE_REAL_CST (val->value);
440 type = TREE_TYPE (val->value);
441 mode = TYPE_MODE (type);
442
443 if (!HONOR_SIGNED_ZEROS (mode)
444 && REAL_VALUE_MINUS_ZERO (d))
445 {
446 val->value = build_real (type, dconst0);
447 return;
448 }
449
450 if (!HONOR_NANS (mode)
451 && REAL_VALUE_ISNAN (d))
452 {
453 val->lattice_val = UNDEFINED;
454 val->value = NULL;
b31eb493 455 return;
456 }
457}
458
b7e55469 459/* Return whether the lattice transition is valid. */
460
461static bool
9908fe4d 462valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
b7e55469 463{
464 /* Lattice transitions must always be monotonically increasing in
465 value. */
466 if (old_val.lattice_val < new_val.lattice_val)
467 return true;
468
469 if (old_val.lattice_val != new_val.lattice_val)
470 return false;
471
472 if (!old_val.value && !new_val.value)
473 return true;
474
475 /* Now both lattice values are CONSTANT. */
476
43c92e0a 477 /* Allow transitioning from PHI <&x, not executable> == &x
478 to PHI <&x, &y> == common alignment. */
b7e55469 479 if (TREE_CODE (old_val.value) != INTEGER_CST
480 && TREE_CODE (new_val.value) == INTEGER_CST)
481 return true;
482
483 /* Bit-lattices have to agree in the still valid bits. */
484 if (TREE_CODE (old_val.value) == INTEGER_CST
485 && TREE_CODE (new_val.value) == INTEGER_CST)
5de9d3ed 486 return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
487 == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
b7e55469 488
489 /* Otherwise constant values have to agree. */
490 return operand_equal_p (old_val.value, new_val.value, 0);
491}
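/* A small worked instance of the INTEGER_CST case above, with
   made-up numbers: old value 0x12 (mask 0x1) and new value 0x13
   (mask 0x3) agree, because both are 0x10 once the new mask bits
   are cleared; a new value of 0x16 would not, since bit 2 is
   still valid in the new mask yet differs.  */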
492
88dbf20f 493/* Set the value for variable VAR to NEW_VAL. Return true if the new
494 value is different from VAR's previous value. */
4ee9c684 495
41511585 496static bool
9908fe4d 497set_lattice_value (tree var, ccp_prop_value_t new_val)
4ee9c684 498{
6d0bf6d6 499 /* We can deal with old UNINITIALIZED values just fine here. */
9908fe4d 500 ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
88dbf20f 501
f5faab84 502 canonicalize_value (&new_val);
b31eb493 503
b7e55469 504 /* We have to be careful to not go up the bitwise lattice
505 represented by the mask.
506 ??? This doesn't seem to be the best place to enforce this. */
507 if (new_val.lattice_val == CONSTANT
508 && old_val->lattice_val == CONSTANT
509 && TREE_CODE (new_val.value) == INTEGER_CST
510 && TREE_CODE (old_val->value) == INTEGER_CST)
511 {
5de9d3ed 512 widest_int diff = (wi::to_widest (new_val.value)
513 ^ wi::to_widest (old_val->value));
cf8f0e63 514 new_val.mask = new_val.mask | old_val->mask | diff;
b7e55469 515 }
bfa30570 516
b7e55469 517 gcc_assert (valid_lattice_transition (*old_val, new_val));
88dbf20f 518
b7e55469 519 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
520 caller that this was a non-transition. */
521 if (old_val->lattice_val != new_val.lattice_val
522 || (new_val.lattice_val == CONSTANT
523 && TREE_CODE (new_val.value) == INTEGER_CST
524 && (TREE_CODE (old_val->value) != INTEGER_CST
3a54beaf 525 || new_val.mask != old_val->mask)))
4ee9c684 526 {
b7e55469 527 /* ??? We would like to delay creation of INTEGER_CSTs from
528 partially constants here. */
529
41511585 530 if (dump_file && (dump_flags & TDF_DETAILS))
531 {
88dbf20f 532 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
bfa30570 533 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
41511585 534 }
535
88dbf20f 536 *old_val = new_val;
537
6d0bf6d6 538 gcc_assert (new_val.lattice_val != UNINITIALIZED);
bfa30570 539 return true;
4ee9c684 540 }
41511585 541
542 return false;
4ee9c684 543}
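
/* Worked example for the mask adjustment above (illustrative):
   moving from CONSTANT 4 (mask 0) to CONSTANT 6 (mask 0) yields
   diff == 4 ^ 6 == 2, so the stored value becomes CONSTANT 6 with
   mask 2 -- bit 1 is demoted to "unknown" rather than letting the
   lattice oscillate between two constants.  */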
544
9908fe4d 545static ccp_prop_value_t get_value_for_expr (tree, bool);
546static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
5de9d3ed 547static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
10c3fe8d 548 tree, const widest_int &, const widest_int &,
549 tree, const widest_int &, const widest_int &);
b7e55469 550
5de9d3ed 551/* Return a widest_int that can be used for bitwise simplifications
b7e55469 552 from VAL. */
553
5de9d3ed 554static widest_int
9908fe4d 555value_to_wide_int (ccp_prop_value_t val)
b7e55469 556{
557 if (val.value
558 && TREE_CODE (val.value) == INTEGER_CST)
5de9d3ed 559 return wi::to_widest (val.value);
e913b5cd 560
561 return 0;
b7e55469 562}
563
564/* Return the value for the address expression EXPR based on alignment
565 information. */
6d0bf6d6 566
9908fe4d 567static ccp_prop_value_t
b7e55469 568get_value_from_alignment (tree expr)
569{
f8abb542 570 tree type = TREE_TYPE (expr);
9908fe4d 571 ccp_prop_value_t val;
f8abb542 572 unsigned HOST_WIDE_INT bitpos;
573 unsigned int align;
b7e55469 574
575 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
576
59da1bcd 577 get_pointer_alignment_1 (expr, &align, &bitpos);
cf8f0e63 578 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
5de9d3ed 579 ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
e913b5cd 580 : -1).and_not (align / BITS_PER_UNIT - 1);
796b6678 581 val.lattice_val = val.mask == -1 ? VARYING : CONSTANT;
f8abb542 582 if (val.lattice_val == CONSTANT)
796b6678 583 val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
b7e55469 584 else
f8abb542 585 val.value = NULL_TREE;
b7e55469 586
587 return val;
588}
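/* E.g. (illustrative): for &x where x is known to be 16-byte
   aligned at byte offset 4, the resulting lattice value has a
   mask whose low four bits are clear and a value of 4 -- the low
   bits of the address are known to be 0b0100.  */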
589
590/* Return the value for the tree operand EXPR. If FOR_BITS_P is true
591 return constant bits extracted from alignment information for
592 invariant addresses. */
593
9908fe4d 594static ccp_prop_value_t
b7e55469 595get_value_for_expr (tree expr, bool for_bits_p)
6d0bf6d6 596{
9908fe4d 597 ccp_prop_value_t val;
6d0bf6d6 598
599 if (TREE_CODE (expr) == SSA_NAME)
b7e55469 600 {
601 val = *get_value (expr);
602 if (for_bits_p
603 && val.lattice_val == CONSTANT
604 && TREE_CODE (val.value) == ADDR_EXPR)
605 val = get_value_from_alignment (val.value);
606 }
607 else if (is_gimple_min_invariant (expr)
608 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
6d0bf6d6 609 {
610 val.lattice_val = CONSTANT;
611 val.value = expr;
e913b5cd 612 val.mask = 0;
f5faab84 613 canonicalize_value (&val);
6d0bf6d6 614 }
b7e55469 615 else if (TREE_CODE (expr) == ADDR_EXPR)
616 val = get_value_from_alignment (expr);
6d0bf6d6 617 else
618 {
619 val.lattice_val = VARYING;
09b2913a 620 val.mask = -1;
6d0bf6d6 621 val.value = NULL_TREE;
622 }
6d0bf6d6 623 return val;
624}
625
88dbf20f 626/* Return the likely CCP lattice value for STMT.
4ee9c684 627
41511585 628 If STMT has no operands, then return CONSTANT.
4ee9c684 629
d61b9af3 630 Else if undefinedness of operands of STMT cause its value to be
631 undefined, then return UNDEFINED.
4ee9c684 632
41511585 633 Else if any operands of STMT are constants, then return CONSTANT.
4ee9c684 634
41511585 635 Else return VARYING. */
4ee9c684 636
88dbf20f 637static ccp_lattice_t
75a70cf9 638likely_value (gimple stmt)
41511585 639{
d61b9af3 640 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
41511585 641 tree use;
642 ssa_op_iter iter;
8edeb88b 643 unsigned i;
4ee9c684 644
590c3166 645 enum gimple_code code = gimple_code (stmt);
75a70cf9 646
647 /* This function appears to be called only for assignments, calls,
648 conditionals, and switches, due to the logic in visit_stmt. */
649 gcc_assert (code == GIMPLE_ASSIGN
650 || code == GIMPLE_CALL
651 || code == GIMPLE_COND
652 || code == GIMPLE_SWITCH);
88dbf20f 653
654 /* If the statement has volatile operands, it won't fold to a
655 constant value. */
75a70cf9 656 if (gimple_has_volatile_ops (stmt))
88dbf20f 657 return VARYING;
658
75a70cf9 659 /* Arrive here for more complex cases. */
bfa30570 660 has_constant_operand = false;
d61b9af3 661 has_undefined_operand = false;
662 all_undefined_operands = true;
8edeb88b 663 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
41511585 664 {
9908fe4d 665 ccp_prop_value_t *val = get_value (use);
41511585 666
bfa30570 667 if (val->lattice_val == UNDEFINED)
d61b9af3 668 has_undefined_operand = true;
669 else
670 all_undefined_operands = false;
88dbf20f 671
41511585 672 if (val->lattice_val == CONSTANT)
bfa30570 673 has_constant_operand = true;
4ee9c684 674 }
41511585 675
dd277d48 676 /* There may be constants in regular rhs operands. For calls we
 677 have to skip the lhs, fndecl and static chain; for other
 678 statements only the lhs. */
679 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
8edeb88b 680 i < gimple_num_ops (stmt); ++i)
681 {
682 tree op = gimple_op (stmt, i);
683 if (!op || TREE_CODE (op) == SSA_NAME)
684 continue;
685 if (is_gimple_min_invariant (op))
686 has_constant_operand = true;
687 }
688
87c0a9fc 689 if (has_constant_operand)
690 all_undefined_operands = false;
691
3d483a94 692 if (has_undefined_operand
693 && code == GIMPLE_CALL
694 && gimple_call_internal_p (stmt))
695 switch (gimple_call_internal_fn (stmt))
696 {
 697 /* These 3 internal functions use the first argument just as a
 698 magic way to find out a decl uid. */
699 case IFN_GOMP_SIMD_LANE:
700 case IFN_GOMP_SIMD_VF:
701 case IFN_GOMP_SIMD_LAST_LANE:
702 has_undefined_operand = false;
703 break;
704 default:
705 break;
706 }
707
d61b9af3 708 /* If the operation combines operands like COMPLEX_EXPR make sure to
709 not mark the result UNDEFINED if only one part of the result is
710 undefined. */
75a70cf9 711 if (has_undefined_operand && all_undefined_operands)
d61b9af3 712 return UNDEFINED;
75a70cf9 713 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
d61b9af3 714 {
75a70cf9 715 switch (gimple_assign_rhs_code (stmt))
d61b9af3 716 {
717 /* Unary operators are handled with all_undefined_operands. */
718 case PLUS_EXPR:
719 case MINUS_EXPR:
d61b9af3 720 case POINTER_PLUS_EXPR:
d61b9af3 721 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
722 Not bitwise operators, one VARYING operand may specify the
723 result completely. Not logical operators for the same reason.
05a936a0 724 Not COMPLEX_EXPR as one VARYING operand makes the result partly
725 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
726 the undefined operand may be promoted. */
d61b9af3 727 return UNDEFINED;
728
43c92e0a 729 case ADDR_EXPR:
730 /* If any part of an address is UNDEFINED, like the index
731 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
732 return UNDEFINED;
733
d61b9af3 734 default:
735 ;
736 }
737 }
 738 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
c91fedc5 739 fall back to CONSTANT. During iteration UNDEFINED may still drop
740 to CONSTANT. */
d61b9af3 741 if (has_undefined_operand)
c91fedc5 742 return CONSTANT;
d61b9af3 743
8edeb88b 744 /* We do not consider virtual operands here -- load from read-only
745 memory may have only VARYING virtual operands, but still be
746 constant. */
bfa30570 747 if (has_constant_operand
8edeb88b 748 || gimple_references_memory_p (stmt))
88dbf20f 749 return CONSTANT;
750
bfa30570 751 return VARYING;
4ee9c684 752}
753
bfa30570 754/* Returns true if STMT cannot be constant. */
755
756static bool
75a70cf9 757surely_varying_stmt_p (gimple stmt)
bfa30570 758{
759 /* If the statement has operands that we cannot handle, it cannot be
760 constant. */
75a70cf9 761 if (gimple_has_volatile_ops (stmt))
bfa30570 762 return true;
763
f257af64 764 /* A call is varying if it does not return a value, or if it is a
237e78b1 765 direct call to a function that is not a builtin and has neither
 766 an assume_aligned nor an alloc_align attribute. */
75a70cf9 767 if (is_gimple_call (stmt))
f257af64 768 {
237e78b1 769 tree fndecl, fntype = gimple_call_fntype (stmt);
f257af64 770 if (!gimple_call_lhs (stmt)
771 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
237e78b1 772 && !DECL_BUILT_IN (fndecl)
773 && !lookup_attribute ("assume_aligned",
774 TYPE_ATTRIBUTES (fntype))
775 && !lookup_attribute ("alloc_align",
776 TYPE_ATTRIBUTES (fntype))))
f257af64 777 return true;
778 }
bfa30570 779
8edeb88b 780 /* Any other store operation is not interesting. */
dd277d48 781 else if (gimple_vdef (stmt))
8edeb88b 782 return true;
783
bfa30570 784 /* Anything other than assignments and conditional jumps are not
785 interesting for CCP. */
75a70cf9 786 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 787 && gimple_code (stmt) != GIMPLE_COND
788 && gimple_code (stmt) != GIMPLE_SWITCH
789 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 790 return true;
791
792 return false;
793}
4ee9c684 794
41511585 795/* Initialize local data structures for CCP. */
4ee9c684 796
797static void
41511585 798ccp_initialize (void)
4ee9c684 799{
41511585 800 basic_block bb;
4ee9c684 801
285df01b 802 n_const_val = num_ssa_names;
9908fe4d 803 const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
4ee9c684 804
41511585 805 /* Initialize simulation flags for PHI nodes and statements. */
fc00614f 806 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 807 {
75a70cf9 808 gimple_stmt_iterator i;
4ee9c684 809
75a70cf9 810 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
41511585 811 {
75a70cf9 812 gimple stmt = gsi_stmt (i);
2193544e 813 bool is_varying;
814
 815 /* If the statement is a control insn, we must simulate
 816 it at least once; otherwise its outgoing edges would
 817 never get added. */
818 if (stmt_ends_bb_p (stmt))
819 is_varying = false;
820 else
821 is_varying = surely_varying_stmt_p (stmt);
4ee9c684 822
bfa30570 823 if (is_varying)
41511585 824 {
88dbf20f 825 tree def;
826 ssa_op_iter iter;
827
828 /* If the statement will not produce a constant, mark
829 all its outputs VARYING. */
830 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
8edeb88b 831 set_value_varying (def);
41511585 832 }
75a70cf9 833 prop_set_simulate_again (stmt, !is_varying);
41511585 834 }
4ee9c684 835 }
836
75a70cf9 837 /* Now process PHI nodes. We never clear the simulate_again flag on
 838 phi nodes, since we do not know which edges are executable yet,
 839 except for phi nodes for virtual operands, which are never simulated. */
fc00614f 840 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 841 {
75a70cf9 842 gimple_stmt_iterator i;
41511585 843
75a70cf9 844 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
845 {
846 gimple phi = gsi_stmt (i);
847
7c782c9b 848 if (virtual_operand_p (gimple_phi_result (phi)))
75a70cf9 849 prop_set_simulate_again (phi, false);
bfa30570 850 else
75a70cf9 851 prop_set_simulate_again (phi, true);
41511585 852 }
4ee9c684 853 }
41511585 854}
4ee9c684 855
43fb76c1 856/* Debug count support. Reset the values of ssa names to
 857 VARYING once the total number of ssa names analyzed is
 858 beyond the specified debug count. */
859
860static void
861do_dbg_cnt (void)
862{
863 unsigned i;
864 for (i = 0; i < num_ssa_names; i++)
865 {
866 if (!dbg_cnt (ccp))
867 {
868 const_val[i].lattice_val = VARYING;
e913b5cd 869 const_val[i].mask = -1;
43fb76c1 870 const_val[i].value = NULL_TREE;
871 }
872 }
873}
874
4ee9c684 875
88dbf20f 876/* Do final substitution of propagated values, cleanup the flowgraph and
48e1416a 877 free allocated storage.
4ee9c684 878
33a34f1e 879 Return TRUE when something was optimized. */
880
881static bool
88dbf20f 882ccp_finalize (void)
4ee9c684 883{
43fb76c1 884 bool something_changed;
153c3b50 885 unsigned i;
43fb76c1 886
887 do_dbg_cnt ();
153c3b50 888
889 /* Derive alignment and misalignment information from partially
fc08b993 890 constant pointers in the lattice or nonzero bits from partially
891 constant integers. */
153c3b50 892 for (i = 1; i < num_ssa_names; ++i)
893 {
894 tree name = ssa_name (i);
9908fe4d 895 ccp_prop_value_t *val;
153c3b50 896 unsigned int tem, align;
897
898 if (!name
fc08b993 899 || (!POINTER_TYPE_P (TREE_TYPE (name))
900 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
901 /* Don't record nonzero bits before IPA to avoid
902 using too much memory. */
903 || first_pass_instance)))
153c3b50 904 continue;
905
906 val = get_value (name);
907 if (val->lattice_val != CONSTANT
908 || TREE_CODE (val->value) != INTEGER_CST)
909 continue;
910
fc08b993 911 if (POINTER_TYPE_P (TREE_TYPE (name)))
912 {
913 /* Trailing mask bits specify the alignment, trailing value
914 bits the misalignment. */
aeb682a2 915 tem = val->mask.to_uhwi ();
fc08b993 916 align = (tem & -tem);
917 if (align > 1)
918 set_ptr_info_alignment (get_ptr_info (name), align,
f9ae6f95 919 (TREE_INT_CST_LOW (val->value)
fc08b993 920 & (align - 1)));
921 }
922 else
923 {
9c1be15e 924 unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
925 wide_int nonzero_bits = wide_int::from (val->mask, precision,
926 UNSIGNED) | val->value;
fc08b993 927 nonzero_bits &= get_nonzero_bits (name);
928 set_nonzero_bits (name, nonzero_bits);
929 }
153c3b50 930 }
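
  /* To make the encoding above concrete (illustrative numbers): a
     pointer lattice value whose mask ends in ...fff0 with value 0x4
     gives tem & -tem == 0x10, hence a derived alignment of 16 bytes
     and a misalignment of 4.  */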
931
88dbf20f 932 /* Perform substitutions based on the known constant values. */
14f101cf 933 something_changed = substitute_and_fold (get_constant_value,
934 ccp_fold_stmt, true);
4ee9c684 935
88dbf20f 936 free (const_val);
e004838d 937 const_val = NULL;
33a34f1e 938 return something_changed;
4ee9c684 939}
940
941
88dbf20f 942/* Compute the meet operator between *VAL1 and *VAL2. Store the result
943 in VAL1.
944
945 any M UNDEFINED = any
88dbf20f 946 any M VARYING = VARYING
947 Ci M Cj = Ci if (i == j)
948 Ci M Cj = VARYING if (i != j)
bfa30570 949 */
4ee9c684 950
951static void
9908fe4d 952ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
4ee9c684 953{
88dbf20f 954 if (val1->lattice_val == UNDEFINED)
4ee9c684 955 {
88dbf20f 956 /* UNDEFINED M any = any */
957 *val1 = *val2;
41511585 958 }
88dbf20f 959 else if (val2->lattice_val == UNDEFINED)
92481a4d 960 {
88dbf20f 961 /* any M UNDEFINED = any
962 Nothing to do. VAL1 already contains the value we want. */
963 ;
92481a4d 964 }
88dbf20f 965 else if (val1->lattice_val == VARYING
966 || val2->lattice_val == VARYING)
41511585 967 {
88dbf20f 968 /* any M VARYING = VARYING. */
969 val1->lattice_val = VARYING;
e913b5cd 970 val1->mask = -1;
88dbf20f 971 val1->value = NULL_TREE;
41511585 972 }
b7e55469 973 else if (val1->lattice_val == CONSTANT
974 && val2->lattice_val == CONSTANT
975 && TREE_CODE (val1->value) == INTEGER_CST
976 && TREE_CODE (val2->value) == INTEGER_CST)
977 {
978 /* Ci M Cj = Ci if (i == j)
979 Ci M Cj = VARYING if (i != j)
980
981 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
982 drop to varying. */
e913b5cd 983 val1->mask = (val1->mask | val2->mask
5de9d3ed 984 | (wi::to_widest (val1->value)
985 ^ wi::to_widest (val2->value)));
796b6678 986 if (val1->mask == -1)
b7e55469 987 {
988 val1->lattice_val = VARYING;
989 val1->value = NULL_TREE;
990 }
991 }
88dbf20f 992 else if (val1->lattice_val == CONSTANT
993 && val2->lattice_val == CONSTANT
61207d43 994 && simple_cst_equal (val1->value, val2->value) == 1)
41511585 995 {
88dbf20f 996 /* Ci M Cj = Ci if (i == j)
997 Ci M Cj = VARYING if (i != j)
998
b7e55469 999 VAL1 already contains the value we want for equivalent values. */
1000 }
1001 else if (val1->lattice_val == CONSTANT
1002 && val2->lattice_val == CONSTANT
1003 && (TREE_CODE (val1->value) == ADDR_EXPR
1004 || TREE_CODE (val2->value) == ADDR_EXPR))
1005 {
1006 /* When not equal addresses are involved try meeting for
1007 alignment. */
9908fe4d 1008 ccp_prop_value_t tem = *val2;
b7e55469 1009 if (TREE_CODE (val1->value) == ADDR_EXPR)
1010 *val1 = get_value_for_expr (val1->value, true);
1011 if (TREE_CODE (val2->value) == ADDR_EXPR)
1012 tem = get_value_for_expr (val2->value, true);
1013 ccp_lattice_meet (val1, &tem);
41511585 1014 }
1015 else
1016 {
88dbf20f 1017 /* Any other combination is VARYING. */
1018 val1->lattice_val = VARYING;
e913b5cd 1019 val1->mask = -1;
88dbf20f 1020 val1->value = NULL_TREE;
41511585 1021 }
4ee9c684 1022}
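/* Illustrative meet of two INTEGER_CSTs per the code above:
   CONSTANT 3 (0b011, mask 0) M CONSTANT 5 (0b101, mask 0) gives
   mask 0b110 while keeping value 3 -- only bit 0, which agrees in
   both inputs, stays known, instead of dropping straight to
   VARYING.  */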
1023
1024
41511585 1025/* Loop through the PHI_NODE's parameters for BLOCK and compare their
1026 lattice values to determine PHI_NODE's lattice value. The value of a
88dbf20f 1027 PHI node is determined by calling ccp_lattice_meet with all the arguments
41511585 1028 of the PHI node that are incoming via executable edges. */
4ee9c684 1029
41511585 1030static enum ssa_prop_result
75a70cf9 1031ccp_visit_phi_node (gimple phi)
4ee9c684 1032{
75a70cf9 1033 unsigned i;
9908fe4d 1034 ccp_prop_value_t *old_val, new_val;
4ee9c684 1035
41511585 1036 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 1037 {
41511585 1038 fprintf (dump_file, "\nVisiting PHI node: ");
75a70cf9 1039 print_gimple_stmt (dump_file, phi, 0, dump_flags);
4ee9c684 1040 }
4ee9c684 1041
75a70cf9 1042 old_val = get_value (gimple_phi_result (phi));
41511585 1043 switch (old_val->lattice_val)
1044 {
1045 case VARYING:
88dbf20f 1046 return SSA_PROP_VARYING;
4ee9c684 1047
41511585 1048 case CONSTANT:
1049 new_val = *old_val;
1050 break;
4ee9c684 1051
41511585 1052 case UNDEFINED:
41511585 1053 new_val.lattice_val = UNDEFINED;
88dbf20f 1054 new_val.value = NULL_TREE;
41511585 1055 break;
4ee9c684 1056
41511585 1057 default:
8c0963c4 1058 gcc_unreachable ();
41511585 1059 }
4ee9c684 1060
75a70cf9 1061 for (i = 0; i < gimple_phi_num_args (phi); i++)
41511585 1062 {
88dbf20f 1063 /* Compute the meet operator over all the PHI arguments flowing
1064 through executable edges. */
75a70cf9 1065 edge e = gimple_phi_arg_edge (phi, i);
4ee9c684 1066
41511585 1067 if (dump_file && (dump_flags & TDF_DETAILS))
1068 {
1069 fprintf (dump_file,
1070 "\n Argument #%d (%d -> %d %sexecutable)\n",
1071 i, e->src->index, e->dest->index,
1072 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1073 }
1074
 1075 /* If the incoming edge is executable, compute the meet operator for
1076 the existing value of the PHI node and the current PHI argument. */
1077 if (e->flags & EDGE_EXECUTABLE)
1078 {
75a70cf9 1079 tree arg = gimple_phi_arg (phi, i)->def;
9908fe4d 1080 ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
4ee9c684 1081
88dbf20f 1082 ccp_lattice_meet (&new_val, &arg_val);
4ee9c684 1083
41511585 1084 if (dump_file && (dump_flags & TDF_DETAILS))
1085 {
1086 fprintf (dump_file, "\t");
88dbf20f 1087 print_generic_expr (dump_file, arg, dump_flags);
1088 dump_lattice_value (dump_file, "\tValue: ", arg_val);
41511585 1089 fprintf (dump_file, "\n");
1090 }
4ee9c684 1091
41511585 1092 if (new_val.lattice_val == VARYING)
1093 break;
1094 }
1095 }
4ee9c684 1096
1097 if (dump_file && (dump_flags & TDF_DETAILS))
41511585 1098 {
1099 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1100 fprintf (dump_file, "\n\n");
1101 }
1102
bfa30570 1103 /* Make the transition to the new value. */
75a70cf9 1104 if (set_lattice_value (gimple_phi_result (phi), new_val))
41511585 1105 {
1106 if (new_val.lattice_val == VARYING)
1107 return SSA_PROP_VARYING;
1108 else
1109 return SSA_PROP_INTERESTING;
1110 }
1111 else
1112 return SSA_PROP_NOT_INTERESTING;
4ee9c684 1113}
1114
15d138c9 1115/* Return the constant value for OP, or OP itself if no constant is known. */
00f4f705 1116
1117static tree
15d138c9 1118valueize_op (tree op)
00f4f705 1119{
00f4f705 1120 if (TREE_CODE (op) == SSA_NAME)
1121 {
15d138c9 1122 tree tem = get_constant_value (op);
1123 if (tem)
1124 return tem;
00f4f705 1125 }
1126 return op;
1127}
1128
4e1b3545 1129/* Return the constant value for OP, but signal to not follow SSA
1130 edges if the definition may be simulated again. */
1131
1132static tree
1133valueize_op_1 (tree op)
1134{
1135 if (TREE_CODE (op) == SSA_NAME)
1136 {
1137 tree tem = get_constant_value (op);
1138 if (tem)
1139 return tem;
1140 /* If the definition may be simulated again we cannot follow
1141 this SSA edge as the SSA propagator does not necessarily
1142 re-visit the use. */
1143 gimple def_stmt = SSA_NAME_DEF_STMT (op);
1144 if (prop_simulate_again_p (def_stmt))
1145 return NULL_TREE;
1146 }
1147 return op;
1148}
1149
41511585 1150/* CCP specific front-end to the non-destructive constant folding
1151 routines.
4ee9c684 1152
1153 Attempt to simplify the RHS of STMT knowing that one or more
1154 operands are constants.
1155
1156 If simplification is possible, return the simplified RHS,
75a70cf9 1157 otherwise return the original RHS or NULL_TREE. */
4ee9c684 1158
1159static tree
75a70cf9 1160ccp_fold (gimple stmt)
4ee9c684 1161{
389dd41b 1162 location_t loc = gimple_location (stmt);
75a70cf9 1163 switch (gimple_code (stmt))
88dbf20f 1164 {
75a70cf9 1165 case GIMPLE_COND:
1166 {
1167 /* Handle comparison operators that can appear in GIMPLE form. */
15d138c9 1168 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1169 tree op1 = valueize_op (gimple_cond_rhs (stmt));
75a70cf9 1170 enum tree_code code = gimple_cond_code (stmt);
389dd41b 1171 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
75a70cf9 1172 }
4ee9c684 1173
75a70cf9 1174 case GIMPLE_SWITCH:
1175 {
15d138c9 1176 /* Return the constant switch index. */
1177 return valueize_op (gimple_switch_index (stmt));
75a70cf9 1178 }
912f109f 1179
1d0b727d 1180 case GIMPLE_ASSIGN:
1181 case GIMPLE_CALL:
4e1b3545 1182 return gimple_fold_stmt_to_constant_1 (stmt,
1183 valueize_op, valueize_op_1);
04236c3a 1184
8782adcf 1185 default:
1d0b727d 1186 gcc_unreachable ();
8782adcf 1187 }
8782adcf 1188}
75a70cf9 1189
b7e55469 1190/* Apply the operation CODE in type TYPE to the value, mask pair
1191 RVAL and RMASK representing a value of type RTYPE and set
1192 the value, mask pair *VAL and *MASK to the result. */
1193
1194static void
1195bit_value_unop_1 (enum tree_code code, tree type,
5de9d3ed 1196 widest_int *val, widest_int *mask,
1197 tree rtype, const widest_int &rval, const widest_int &rmask)
b7e55469 1198{
1199 switch (code)
1200 {
1201 case BIT_NOT_EXPR:
1202 *mask = rmask;
cf8f0e63 1203 *val = ~rval;
b7e55469 1204 break;
1205
1206 case NEGATE_EXPR:
1207 {
5de9d3ed 1208 widest_int temv, temm;
b7e55469 1209 /* Return ~rval + 1. */
1210 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1211 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
e913b5cd 1212 type, temv, temm, type, 1, 0);
b7e55469 1213 break;
1214 }
1215
1216 CASE_CONVERT:
1217 {
e913b5cd 1218 signop sgn;
b7e55469 1219
1220 /* First extend mask and value according to the original type. */
e913b5cd 1221 sgn = TYPE_SIGN (rtype);
796b6678 1222 *mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
1223 *val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);
b7e55469 1224
1225 /* Then extend mask and value according to the target type. */
e913b5cd 1226 sgn = TYPE_SIGN (type);
796b6678 1227 *mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
1228 *val = wi::ext (*val, TYPE_PRECISION (type), sgn);
b7e55469 1229 break;
1230 }
1231
1232 default:
e913b5cd 1233 *mask = -1;
b7e55469 1234 break;
1235 }
1236}
1237
1238/* Apply the operation CODE in type TYPE to the value, mask pairs
 1239 R1VAL, R1MASK and R2VAL, R2MASK representing values of types R1TYPE
 1240 and R2TYPE, and set the value, mask pair *VAL and *MASK to the result. */
1241
1242static void
1243bit_value_binop_1 (enum tree_code code, tree type,
5de9d3ed 1244 widest_int *val, widest_int *mask,
10c3fe8d 1245 tree r1type, const widest_int &r1val,
1246 const widest_int &r1mask, tree r2type,
1247 const widest_int &r2val, const widest_int &r2mask)
b7e55469 1248{
e913b5cd 1249 signop sgn = TYPE_SIGN (type);
1250 int width = TYPE_PRECISION (type);
10c3fe8d 1251 bool swap_p = false;
e913b5cd 1252
 1253 /* Assume we'll get a constant result. Use an initial non-varying
 1254 value; we fall back to varying in the end if necessary. */
1255 *mask = -1;
1256
b7e55469 1257 switch (code)
1258 {
1259 case BIT_AND_EXPR:
1260 /* The mask is constant where there is a known not
1261 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
cf8f0e63 1262 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1263 *val = r1val & r2val;
b7e55469 1264 break;
1265
1266 case BIT_IOR_EXPR:
1267 /* The mask is constant where there is a known
1268 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
cf8f0e63 1269 *mask = (r1mask | r2mask)
1270 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1271 *val = r1val | r2val;
b7e55469 1272 break;
1273
1274 case BIT_XOR_EXPR:
1275 /* m1 | m2 */
cf8f0e63 1276 *mask = r1mask | r2mask;
1277 *val = r1val ^ r2val;
b7e55469 1278 break;
1279
1280 case LROTATE_EXPR:
1281 case RROTATE_EXPR:
796b6678 1282 if (r2mask == 0)
b7e55469 1283 {
28e557ef 1284 widest_int shift = r2val;
796b6678 1285 if (shift == 0)
e913b5cd 1286 {
1287 *mask = r1mask;
1288 *val = r1val;
1289 }
ddb1be65 1290 else
e913b5cd 1291 {
796b6678 1292 if (wi::neg_p (shift))
e913b5cd 1293 {
1294 shift = -shift;
1295 if (code == RROTATE_EXPR)
1296 code = LROTATE_EXPR;
1297 else
1298 code = RROTATE_EXPR;
1299 }
1300 if (code == RROTATE_EXPR)
1301 {
796b6678 1302 *mask = wi::rrotate (r1mask, shift, width);
1303 *val = wi::rrotate (r1val, shift, width);
e913b5cd 1304 }
1305 else
1306 {
796b6678 1307 *mask = wi::lrotate (r1mask, shift, width);
1308 *val = wi::lrotate (r1val, shift, width);
e913b5cd 1309 }
1310 }
b7e55469 1311 }
1312 break;
1313
1314 case LSHIFT_EXPR:
1315 case RSHIFT_EXPR:
 1316 /* ??? We can handle partially known shift counts if we know
 1317 their sign. That way we can tell that (x << (y | 8)) & 255
1318 is zero. */
796b6678 1319 if (r2mask == 0)
b7e55469 1320 {
28e557ef 1321 widest_int shift = r2val;
796b6678 1322 if (shift == 0)
b7e55469 1323 {
1324 *mask = r1mask;
1325 *val = r1val;
1326 }
ddb1be65 1327 else
e913b5cd 1328 {
796b6678 1329 if (wi::neg_p (shift))
e913b5cd 1330 {
1331 shift = -shift;
1332 if (code == RSHIFT_EXPR)
1333 code = LSHIFT_EXPR;
1334 else
1335 code = RSHIFT_EXPR;
1336 }
1337 if (code == RSHIFT_EXPR)
1338 {
45639915 1339 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1340 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
e913b5cd 1341 }
1342 else
1343 {
45639915 1344 *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
1345 *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
e913b5cd 1346 }
1347 }
b7e55469 1348 }
1349 break;
1350
1351 case PLUS_EXPR:
1352 case POINTER_PLUS_EXPR:
1353 {
b7e55469 1354 /* Do the addition with unknown bits set to zero, to give carry-ins of
1355 zero wherever possible. */
ab2c1de8 1356 widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
796b6678 1357 lo = wi::ext (lo, width, sgn);
b7e55469 1358 /* Do the addition with unknown bits set to one, to give carry-ins of
1359 one wherever possible. */
ab2c1de8 1360 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
796b6678 1361 hi = wi::ext (hi, width, sgn);
b7e55469 1362 /* Each bit in the result is known if (a) the corresponding bits in
1363 both inputs are known, and (b) the carry-in to that bit position
1364 is known. We can check condition (b) by seeing if we got the same
1365 result with minimised carries as with maximised carries. */
cf8f0e63 1366 *mask = r1mask | r2mask | (lo ^ hi);
796b6678 1367 *mask = wi::ext (*mask, width, sgn);
b7e55469 1368 /* It shouldn't matter whether we choose lo or hi here. */
1369 *val = lo;
1370 break;
1371 }
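      /* Illustrative 4-bit run of the PLUS_EXPR logic above, with
	 made-up operands: r1 = 0b01?0 (r1val 0100, r1mask 0010) plus
	 r2 = 0b0001 gives lo = 0101 and hi = 0111, so lo ^ hi = 0010
	 and the result is value 0101 with mask 0010 -- the unknown bit
	 stays unknown but generates no unknown carries here.  */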
1372
1373 case MINUS_EXPR:
1374 {
5de9d3ed 1375 widest_int temv, temm;
b7e55469 1376 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1377 r2type, r2val, r2mask);
1378 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1379 r1type, r1val, r1mask,
1380 r2type, temv, temm);
1381 break;
1382 }
1383
1384 case MULT_EXPR:
1385 {
1386 /* Just track trailing zeros in both operands and transfer
1387 them to the other. */
796b6678 1388 int r1tz = wi::ctz (r1val | r1mask);
1389 int r2tz = wi::ctz (r2val | r2mask);
e913b5cd 1390 if (r1tz + r2tz >= width)
b7e55469 1391 {
e913b5cd 1392 *mask = 0;
1393 *val = 0;
b7e55469 1394 }
1395 else if (r1tz + r2tz > 0)
1396 {
5de9d3ed 1397 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
796b6678 1398 width, sgn);
e913b5cd 1399 *val = 0;
b7e55469 1400 }
1401 break;
1402 }
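      /* E.g. (illustrative): if r1 is known to be a multiple of 4
	 (two trailing known-zero bits) and r2 a multiple of 2, the
	 product is a multiple of 8, so three trailing bits become
	 known zeros.  */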
1403
1404 case EQ_EXPR:
1405 case NE_EXPR:
1406 {
5de9d3ed 1407 widest_int m = r1mask | r2mask;
cf8f0e63 1408 if (r1val.and_not (m) != r2val.and_not (m))
b7e55469 1409 {
e913b5cd 1410 *mask = 0;
1411 *val = ((code == EQ_EXPR) ? 0 : 1);
b7e55469 1412 }
1413 else
1414 {
1415 /* We know the result of a comparison is always one or zero. */
e913b5cd 1416 *mask = 1;
1417 *val = 0;
b7e55469 1418 }
1419 break;
1420 }
1421
1422 case GE_EXPR:
1423 case GT_EXPR:
10c3fe8d 1424 swap_p = true;
1425 code = swap_tree_comparison (code);
1426 /* Fall through. */
b7e55469 1427 case LT_EXPR:
1428 case LE_EXPR:
1429 {
1430 int minmax, maxmin;
e913b5cd 1431
10c3fe8d 1432 const widest_int &o1val = swap_p ? r2val : r1val;
1433 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1434 const widest_int &o2val = swap_p ? r1val : r2val;
1435 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1436
b7e55469 1437 /* If the most significant bits are not known we know nothing. */
796b6678 1438 if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
b7e55469 1439 break;
1440
90c0f5b7 1441 /* For comparisons the signedness is in the comparison operands. */
e913b5cd 1442 sgn = TYPE_SIGN (r1type);
90c0f5b7 1443
b7e55469 1444 /* If we know the most significant bits we know the value
 1445 ranges by means of treating varying bits as zero
1446 or one. Do a cross comparison of the max/min pairs. */
796b6678 1447 maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
1448 minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
e913b5cd 1449 if (maxmin < 0) /* o1 is less than o2. */
b7e55469 1450 {
e913b5cd 1451 *mask = 0;
1452 *val = 1;
b7e55469 1453 }
e913b5cd 1454 else if (minmax > 0) /* o1 is not less or equal to o2. */
b7e55469 1455 {
e913b5cd 1456 *mask = 0;
1457 *val = 0;
b7e55469 1458 }
e913b5cd 1459 else if (maxmin == minmax) /* o1 and o2 are equal. */
b7e55469 1460 {
1461 /* This probably should never happen as we'd have
1462 folded the thing during fully constant value folding. */
e913b5cd 1463 *mask = 0;
1464 *val = (code == LE_EXPR ? 1 : 0);
b7e55469 1465 }
1466 else
1467 {
1468 /* We know the result of a comparison is always one or zero. */
e913b5cd 1469 *mask = 1;
1470 *val = 0;
b7e55469 1471 }
1472 break;
1473 }
1474
1475 default:;
1476 }
1477}
1478
1479/* Return the propagation value when applying the operation CODE to
1480 the value RHS yielding type TYPE. */
1481
9908fe4d 1482static ccp_prop_value_t
b7e55469 1483bit_value_unop (enum tree_code code, tree type, tree rhs)
1484{
9908fe4d 1485 ccp_prop_value_t rval = get_value_for_expr (rhs, true);
5de9d3ed 1486 widest_int value, mask;
9908fe4d 1487 ccp_prop_value_t val;
c91fedc5 1488
1489 if (rval.lattice_val == UNDEFINED)
1490 return rval;
1491
b7e55469 1492 gcc_assert ((rval.lattice_val == CONSTANT
1493 && TREE_CODE (rval.value) == INTEGER_CST)
796b6678 1494 || rval.mask == -1);
b7e55469 1495 bit_value_unop_1 (code, type, &value, &mask,
e913b5cd 1496 TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
796b6678 1497 if (mask != -1)
b7e55469 1498 {
1499 val.lattice_val = CONSTANT;
1500 val.mask = mask;
1501 /* ??? Delay building trees here. */
e913b5cd 1502 val.value = wide_int_to_tree (type, value);
b7e55469 1503 }
1504 else
1505 {
1506 val.lattice_val = VARYING;
1507 val.value = NULL_TREE;
e913b5cd 1508 val.mask = -1;
b7e55469 1509 }
1510 return val;
1511}
1512
1513/* Return the propagation value when applying the operation CODE to
1514 the values RHS1 and RHS2 yielding type TYPE. */
1515
9908fe4d 1516static ccp_prop_value_t
b7e55469 1517bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1518{
9908fe4d 1519 ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1520 ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
5de9d3ed 1521 widest_int value, mask;
9908fe4d 1522 ccp_prop_value_t val;
c91fedc5 1523
1524 if (r1val.lattice_val == UNDEFINED
1525 || r2val.lattice_val == UNDEFINED)
1526 {
1527 val.lattice_val = VARYING;
1528 val.value = NULL_TREE;
e913b5cd 1529 val.mask = -1;
c91fedc5 1530 return val;
1531 }
1532
b7e55469 1533 gcc_assert ((r1val.lattice_val == CONSTANT
1534 && TREE_CODE (r1val.value) == INTEGER_CST)
796b6678 1535 || r1val.mask == -1);
b7e55469 1536 gcc_assert ((r2val.lattice_val == CONSTANT
1537 && TREE_CODE (r2val.value) == INTEGER_CST)
796b6678 1538 || r2val.mask == -1);
b7e55469 1539 bit_value_binop_1 (code, type, &value, &mask,
e913b5cd 1540 TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
1541 TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
1542 if (mask != -1)
b7e55469 1543 {
1544 val.lattice_val = CONSTANT;
1545 val.mask = mask;
1546 /* ??? Delay building trees here. */
e913b5cd 1547 val.value = wide_int_to_tree (type, value);
b7e55469 1548 }
1549 else
1550 {
1551 val.lattice_val = VARYING;
1552 val.value = NULL_TREE;
e913b5cd 1553 val.mask = -1;
b7e55469 1554 }
1555 return val;
1556}
1557
237e78b1 1558/* Return the propagation value for __builtin_assume_aligned
 1559 and functions with an assume_aligned or alloc_align attribute.
 1560 For __builtin_assume_aligned, ATTR is NULL_TREE,
 1561 for the assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
 1562 is false, for the alloc_align attribute ATTR is non-NULL and
 1563 ALLOC_ALIGNED is true. */
fca0886c 1564
9908fe4d 1565static ccp_prop_value_t
1566bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
237e78b1 1567 bool alloc_aligned)
fca0886c 1568{
237e78b1 1569 tree align, misalign = NULL_TREE, type;
fca0886c 1570 unsigned HOST_WIDE_INT aligni, misaligni = 0;
9908fe4d 1571 ccp_prop_value_t alignval;
5de9d3ed 1572 widest_int value, mask;
9908fe4d 1573 ccp_prop_value_t val;
e913b5cd 1574
237e78b1 1575 if (attr == NULL_TREE)
1576 {
1577 tree ptr = gimple_call_arg (stmt, 0);
1578 type = TREE_TYPE (ptr);
1579 ptrval = get_value_for_expr (ptr, true);
1580 }
1581 else
1582 {
1583 tree lhs = gimple_call_lhs (stmt);
1584 type = TREE_TYPE (lhs);
1585 }
1586
fca0886c 1587 if (ptrval.lattice_val == UNDEFINED)
1588 return ptrval;
1589 gcc_assert ((ptrval.lattice_val == CONSTANT
1590 && TREE_CODE (ptrval.value) == INTEGER_CST)
796b6678 1591 || ptrval.mask == -1);
237e78b1 1592 if (attr == NULL_TREE)
fca0886c 1593 {
237e78b1 1594 /* Get aligni and misaligni from __builtin_assume_aligned. */
1595 align = gimple_call_arg (stmt, 1);
1596 if (!tree_fits_uhwi_p (align))
fca0886c 1597 return ptrval;
237e78b1 1598 aligni = tree_to_uhwi (align);
1599 if (gimple_call_num_args (stmt) > 2)
1600 {
1601 misalign = gimple_call_arg (stmt, 2);
1602 if (!tree_fits_uhwi_p (misalign))
1603 return ptrval;
1604 misaligni = tree_to_uhwi (misalign);
1605 }
1606 }
1607 else
fca0886c 1608 {
237e78b1 1609 /* Get aligni and misaligni from assume_aligned or
1610 alloc_align attributes. */
1611 if (TREE_VALUE (attr) == NULL_TREE)
fca0886c 1612 return ptrval;
237e78b1 1613 attr = TREE_VALUE (attr);
1614 align = TREE_VALUE (attr);
1615 if (!tree_fits_uhwi_p (align))
fca0886c 1616 return ptrval;
237e78b1 1617 aligni = tree_to_uhwi (align);
1618 if (alloc_aligned)
1619 {
1620 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1621 return ptrval;
1622 align = gimple_call_arg (stmt, aligni - 1);
1623 if (!tree_fits_uhwi_p (align))
1624 return ptrval;
1625 aligni = tree_to_uhwi (align);
1626 }
1627 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1628 {
1629 misalign = TREE_VALUE (TREE_CHAIN (attr));
1630 if (!tree_fits_uhwi_p (misalign))
1631 return ptrval;
1632 misaligni = tree_to_uhwi (misalign);
1633 }
fca0886c 1634 }
237e78b1 1635 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1636 return ptrval;
1637
fca0886c 1638 align = build_int_cst_type (type, -aligni);
1639 alignval = get_value_for_expr (align, true);
1640 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
e913b5cd 1641 type, value_to_wide_int (ptrval), ptrval.mask,
1642 type, value_to_wide_int (alignval), alignval.mask);
796b6678 1643 if (mask != -1)
fca0886c 1644 {
1645 val.lattice_val = CONSTANT;
1646 val.mask = mask;
e913b5cd 1647 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1648 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1649 value |= misaligni;
fca0886c 1650 /* ??? Delay building trees here. */
e913b5cd 1651 val.value = wide_int_to_tree (type, value);
fca0886c 1652 }
1653 else
1654 {
1655 val.lattice_val = VARYING;
1656 val.value = NULL_TREE;
e913b5cd 1657 val.mask = -1;
fca0886c 1658 }
1659 return val;
1660}
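/* For instance (illustrative): __builtin_assume_aligned (p, 16, 4)
   ANDs the lattice value of p with -16, forcing the low four bits
   to "known zero", and then ORs in the misalignment 4, so the
   result is known to end in 0b0100.  */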
1661
75a70cf9 1662/* Evaluate statement STMT.
1663 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1664
9908fe4d 1665static ccp_prop_value_t
75a70cf9 1666evaluate_stmt (gimple stmt)
4ee9c684 1667{
9908fe4d 1668 ccp_prop_value_t val;
4f61cce6 1669 tree simplified = NULL_TREE;
88dbf20f 1670 ccp_lattice_t likelyvalue = likely_value (stmt);
b7e55469 1671 bool is_constant = false;
581bf1c2 1672 unsigned int align;
88dbf20f 1673
b7e55469 1674 if (dump_file && (dump_flags & TDF_DETAILS))
1675 {
1676 fprintf (dump_file, "which is likely ");
1677 switch (likelyvalue)
1678 {
1679 case CONSTANT:
1680 fprintf (dump_file, "CONSTANT");
1681 break;
1682 case UNDEFINED:
1683 fprintf (dump_file, "UNDEFINED");
1684 break;
1685 case VARYING:
1686 fprintf (dump_file, "VARYING");
1687 break;
1688 default:;
1689 }
1690 fprintf (dump_file, "\n");
1691 }
add6ee5e 1692
4ee9c684 1693 /* If the statement is likely to have a CONSTANT result, then try
1694 to fold the statement to determine the constant value. */
75a70cf9 1695 /* FIXME. This is the only place that we call ccp_fold.
1696 Since likely_value never returns CONSTANT for calls, we will
1697 not attempt to fold them, including builtins that may profit. */
4ee9c684 1698 if (likelyvalue == CONSTANT)
b7e55469 1699 {
1700 fold_defer_overflow_warnings ();
1701 simplified = ccp_fold (stmt);
1702 is_constant = simplified && is_gimple_min_invariant (simplified);
1703 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1704 if (is_constant)
1705 {
1706 /* The statement produced a constant value. */
1707 val.lattice_val = CONSTANT;
1708 val.value = simplified;
e913b5cd 1709 val.mask = 0;
b7e55469 1710 }
1711 }
4ee9c684 1712 /* If the statement is likely to have a VARYING result, then do not
1713 bother folding the statement. */
04236c3a 1714 else if (likelyvalue == VARYING)
75a70cf9 1715 {
590c3166 1716 enum gimple_code code = gimple_code (stmt);
75a70cf9 1717 if (code == GIMPLE_ASSIGN)
1718 {
1719 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 1720
75a70cf9 1721 /* Other cases cannot satisfy is_gimple_min_invariant
1722 without folding. */
1723 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1724 simplified = gimple_assign_rhs1 (stmt);
1725 }
1726 else if (code == GIMPLE_SWITCH)
1727 simplified = gimple_switch_index (stmt);
1728 else
a65c4d64 1729 /* These cannot satisfy is_gimple_min_invariant without folding. */
1730 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
b7e55469 1731 is_constant = simplified && is_gimple_min_invariant (simplified);
1732 if (is_constant)
1733 {
1734 /* The statement produced a constant value. */
1735 val.lattice_val = CONSTANT;
1736 val.value = simplified;
e913b5cd 1737 val.mask = 0;
b7e55469 1738 }
75a70cf9 1739 }
4ee9c684 1740
b7e55469 1741 /* Resort to simplification for bitwise tracking. */
1742 if (flag_tree_bit_ccp
939514e9 1743 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
b7e55469 1744 && !is_constant)
912f109f 1745 {
b7e55469 1746 enum gimple_code code = gimple_code (stmt);
1747 val.lattice_val = VARYING;
1748 val.value = NULL_TREE;
e913b5cd 1749 val.mask = -1;
b7e55469 1750 if (code == GIMPLE_ASSIGN)
912f109f 1751 {
b7e55469 1752 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1753 tree rhs1 = gimple_assign_rhs1 (stmt);
1754 switch (get_gimple_rhs_class (subcode))
1755 {
1756 case GIMPLE_SINGLE_RHS:
1757 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1758 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1759 val = get_value_for_expr (rhs1, true);
1760 break;
1761
1762 case GIMPLE_UNARY_RHS:
1763 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1764 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1765 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1766 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1767 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1768 break;
1769
1770 case GIMPLE_BINARY_RHS:
1771 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1772 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1773 {
e47d81e0 1774 tree lhs = gimple_assign_lhs (stmt);
b7e55469 1775 tree rhs2 = gimple_assign_rhs2 (stmt);
1776 val = bit_value_binop (subcode,
e47d81e0 1777 TREE_TYPE (lhs), rhs1, rhs2);
b7e55469 1778 }
1779 break;
1780
1781 default:;
1782 }
912f109f 1783 }
b7e55469 1784 else if (code == GIMPLE_COND)
1785 {
1786 enum tree_code code = gimple_cond_code (stmt);
1787 tree rhs1 = gimple_cond_lhs (stmt);
1788 tree rhs2 = gimple_cond_rhs (stmt);
1789 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1790 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1791 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1792 }
0b4f0116 1793 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
153c3b50 1794 {
0b4f0116 1795 tree fndecl = gimple_call_fndecl (stmt);
153c3b50 1796 switch (DECL_FUNCTION_CODE (fndecl))
1797 {
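	    /* Allocation functions return a pointer whose low
	       log2 (MALLOC_ABI_ALIGNMENT / BITS_PER_UNIT) bits are known
	       to be zero; all higher bits stay unknown (set in the mask).  */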
1798 case BUILT_IN_MALLOC:
1799 case BUILT_IN_REALLOC:
1800 case BUILT_IN_CALLOC:
939514e9 1801 case BUILT_IN_STRDUP:
1802 case BUILT_IN_STRNDUP:
153c3b50 1803 val.lattice_val = CONSTANT;
1804 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1805 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1806 / BITS_PER_UNIT - 1);
153c3b50 1807 break;
1808
1809 case BUILT_IN_ALLOCA:
581bf1c2 1810 case BUILT_IN_ALLOCA_WITH_ALIGN:
1811 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
f9ae6f95 1812 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
581bf1c2 1813 : BIGGEST_ALIGNMENT);
153c3b50 1814 val.lattice_val = CONSTANT;
1815 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1816 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
153c3b50 1817 break;
1818
939514e9 1819 /* These builtins return their first argument, unmodified. */
1820 case BUILT_IN_MEMCPY:
1821 case BUILT_IN_MEMMOVE:
1822 case BUILT_IN_MEMSET:
1823 case BUILT_IN_STRCPY:
1824 case BUILT_IN_STRNCPY:
1825 case BUILT_IN_MEMCPY_CHK:
1826 case BUILT_IN_MEMMOVE_CHK:
1827 case BUILT_IN_MEMSET_CHK:
1828 case BUILT_IN_STRCPY_CHK:
1829 case BUILT_IN_STRNCPY_CHK:
1830 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1831 break;
1832
fca0886c 1833 case BUILT_IN_ASSUME_ALIGNED:
237e78b1 1834 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
fca0886c 1835 break;
1836
060fc206 1837 case BUILT_IN_ALIGNED_ALLOC:
1838 {
1839 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1840 if (align
1841 && tree_fits_uhwi_p (align))
1842 {
1843 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1844 if (aligni > 1
 1845 /* align must be a power of two.  */
1846 && (aligni & (aligni - 1)) == 0)
1847 {
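		    /* E.g. (a sketch): aligned_alloc (64, n) records
		       value 0 with mask -64, i.e. the low six bits are
		       known to be zero.  */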
1848 val.lattice_val = CONSTANT;
1849 val.value = build_int_cst (ptr_type_node, 0);
cd89b631 1850 val.mask = -aligni;
060fc206 1851 }
1852 }
1853 break;
1854 }
1855
153c3b50 1856 default:;
1857 }
1858 }
237e78b1 1859 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1860 {
1861 tree fntype = gimple_call_fntype (stmt);
1862 if (fntype)
1863 {
1864 tree attrs = lookup_attribute ("assume_aligned",
1865 TYPE_ATTRIBUTES (fntype));
1866 if (attrs)
1867 val = bit_value_assume_aligned (stmt, attrs, val, false);
1868 attrs = lookup_attribute ("alloc_align",
1869 TYPE_ATTRIBUTES (fntype));
1870 if (attrs)
1871 val = bit_value_assume_aligned (stmt, attrs, val, true);
1872 }
1873 }
b7e55469 1874 is_constant = (val.lattice_val == CONSTANT);
912f109f 1875 }
1876
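  /* Refine the result with the SSA name's recorded nonzero bits, if any:
     bits known to be zero in LHS can be cleared both from the value and
     from the mask of unknown bits.  */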
fc08b993 1877 if (flag_tree_bit_ccp
1878 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1879 || (!is_constant && likelyvalue != UNDEFINED))
1880 && gimple_get_lhs (stmt)
1881 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1882 {
1883 tree lhs = gimple_get_lhs (stmt);
9c1be15e 1884 wide_int nonzero_bits = get_nonzero_bits (lhs);
1885 if (nonzero_bits != -1)
fc08b993 1886 {
1887 if (!is_constant)
1888 {
1889 val.lattice_val = CONSTANT;
1890 val.value = build_zero_cst (TREE_TYPE (lhs));
9c1be15e 1891 val.mask = extend_mask (nonzero_bits);
fc08b993 1892 is_constant = true;
1893 }
1894 else
1895 {
9c1be15e 1896 if (wi::bit_and_not (val.value, nonzero_bits) != 0)
aeb682a2 1897 val.value = wide_int_to_tree (TREE_TYPE (lhs),
9c1be15e 1898 nonzero_bits & val.value);
aeb682a2 1899 if (nonzero_bits == 0)
1900 val.mask = 0;
fc08b993 1901 else
09b2913a 1902 val.mask = val.mask & extend_mask (nonzero_bits);
fc08b993 1903 }
1904 }
1905 }
1906
b7e55469 1907 if (!is_constant)
4ee9c684 1908 {
1909 /* The statement produced a nonconstant value. If the statement
88dbf20f 1910 had UNDEFINED operands, then the result of the statement
1911 should be UNDEFINED. Otherwise, the statement is VARYING. */
bfa30570 1912 if (likelyvalue == UNDEFINED)
b7e55469 1913 {
1914 val.lattice_val = likelyvalue;
e913b5cd 1915 val.mask = 0;
b7e55469 1916 }
b765fa12 1917 else
b7e55469 1918 {
1919 val.lattice_val = VARYING;
e913b5cd 1920 val.mask = -1;
b7e55469 1921 }
b765fa12 1922
88dbf20f 1923 val.value = NULL_TREE;
4ee9c684 1924 }
41511585 1925
1926 return val;
4ee9c684 1927}
1928
c1f445d2 1929typedef hash_table<pointer_hash<gimple_statement_base> > gimple_htab;
2b15d2ba 1930
582a80ed 1931/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1932 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1933
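/* E.g. (a sketch): given
     save_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (save_1);
   a "var ={v} {CLOBBER};" assignment is inserted immediately before the
   restore, telling later passes that VAR's storage dies there.  */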
1934static void
2b15d2ba 1935insert_clobber_before_stack_restore (tree saved_val, tree var,
c1f445d2 1936 gimple_htab **visited)
582a80ed 1937{
1938 gimple stmt, clobber_stmt;
1939 tree clobber;
1940 imm_use_iterator iter;
1941 gimple_stmt_iterator i;
1942 gimple *slot;
1943
1944 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1945 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1946 {
f1f41a6c 1947 clobber = build_constructor (TREE_TYPE (var),
1948 NULL);
582a80ed 1949 TREE_THIS_VOLATILE (clobber) = 1;
1950 clobber_stmt = gimple_build_assign (var, clobber);
1951
1952 i = gsi_for_stmt (stmt);
1953 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1954 }
1955 else if (gimple_code (stmt) == GIMPLE_PHI)
1956 {
c1f445d2 1957 if (!*visited)
1958 *visited = new gimple_htab (10);
582a80ed 1959
c1f445d2 1960 slot = (*visited)->find_slot (stmt, INSERT);
582a80ed 1961 if (*slot != NULL)
1962 continue;
1963
1964 *slot = stmt;
1965 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1966 visited);
1967 }
42eed683 1968 else if (gimple_assign_ssa_name_copy_p (stmt))
1969 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
1970 visited);
058a1b7a 1971 else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
1972 continue;
582a80ed 1973 else
1974 gcc_assert (is_gimple_debug (stmt));
1975}
1976
1977/* Advance the iterator to the previous non-debug gimple statement in the same
1978 or dominating basic block. */
1979
1980static inline void
1981gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1982{
1983 basic_block dom;
1984
1985 gsi_prev_nondebug (i);
1986 while (gsi_end_p (*i))
1987 {
1988 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
34154e27 1989 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
582a80ed 1990 return;
1991
1992 *i = gsi_last_bb (dom);
1993 }
1994}
1995
1996/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1543f720 1997 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
1998
 1999 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
2000 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
2001 that case the function gives up without inserting the clobbers. */
582a80ed 2002
2003static void
2004insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2005{
582a80ed 2006 gimple stmt;
2007 tree saved_val;
c1f445d2 2008 gimple_htab *visited = NULL;
582a80ed 2009
1543f720 2010 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
582a80ed 2011 {
2012 stmt = gsi_stmt (i);
2013
2014 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2015 continue;
582a80ed 2016
2017 saved_val = gimple_call_lhs (stmt);
2018 if (saved_val == NULL_TREE)
2019 continue;
2020
2021 insert_clobber_before_stack_restore (saved_val, var, &visited);
2022 break;
2023 }
2024
c1f445d2 2025 delete visited;
582a80ed 2026}
2027
581bf1c2 2028/* Detect a __builtin_alloca_with_align with a constant size argument. If
 2029 found, declare a fixed-size array with that size and return its address,
 2030 otherwise return NULL_TREE. */
9a65cc0a 2031
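/* E.g. (a sketch, size in bytes, alignment argument in bits):
     p_1 = __builtin_alloca_with_align (16, 32);
   may be folded to the address of a hypothetical 16-byte temporary
   array D.1234 whose DECL_ALIGN is 32:
     p_1 = &D.1234;  */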
2032static tree
581bf1c2 2033fold_builtin_alloca_with_align (gimple stmt)
9a65cc0a 2034{
2035 unsigned HOST_WIDE_INT size, threshold, n_elem;
2036 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 2037
2038 /* Get lhs. */
2039 lhs = gimple_call_lhs (stmt);
2040 if (lhs == NULL_TREE)
2041 return NULL_TREE;
2042
2043 /* Detect constant argument. */
2044 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 2045 if (arg == NULL_TREE
2046 || TREE_CODE (arg) != INTEGER_CST
e913b5cd 2047 || !tree_fits_uhwi_p (arg))
9a65cc0a 2048 return NULL_TREE;
6e93d308 2049
8c53c46c 2050 size = tree_to_uhwi (arg);
9a65cc0a 2051
581bf1c2 2052 /* Heuristic: don't fold large allocas. */
9a65cc0a 2053 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 2054 /* In case the alloca is located at function entry, it has the same lifetime
2055 as a declared array, so we allow a larger size. */
9a65cc0a 2056 block = gimple_block (stmt);
2057 if (!(cfun->after_inlining
2058 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2059 threshold /= 10;
2060 if (size > threshold)
2061 return NULL_TREE;
2062
2063 /* Declare array. */
2064 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2065 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 2066 array_type = build_array_type_nelts (elem_type, n_elem);
2067 var = create_tmp_var (array_type, NULL);
f9ae6f95 2068 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
3d4a0a4b 2069 {
2070 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2071 if (pi != NULL && !pi->pt.anything)
2072 {
2073 bool singleton_p;
2074 unsigned uid;
2075 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
2076 gcc_assert (singleton_p);
2077 SET_DECL_PT_UID (var, uid);
2078 }
2079 }
9a65cc0a 2080
2081 /* Fold alloca to the address of the array. */
2082 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2083}
2084
6688f8ec 2085/* Fold the stmt at *GSI with CCP specific information that propagating
2086 and regular folding does not catch. */
2087
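/* E.g. (a sketch): a GIMPLE_COND whose predicate evaluates to a lattice
   CONSTANT with an all-zero mask is rewritten to an always-true or
   always-false condition, and constant call results are substituted even
   where type mismatches would block ordinary propagation.  */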
2088static bool
2089ccp_fold_stmt (gimple_stmt_iterator *gsi)
2090{
2091 gimple stmt = gsi_stmt (*gsi);
6688f8ec 2092
94144e68 2093 switch (gimple_code (stmt))
2094 {
2095 case GIMPLE_COND:
2096 {
9908fe4d 2097 ccp_prop_value_t val;
94144e68 2098 /* Statement evaluation will handle type mismatches in constants
2099 more gracefully than the final propagation. This allows us to
2100 fold more conditionals here. */
2101 val = evaluate_stmt (stmt);
2102 if (val.lattice_val != CONSTANT
796b6678 2103 || val.mask != 0)
94144e68 2104 return false;
2105
b7e55469 2106 if (dump_file)
2107 {
2108 fprintf (dump_file, "Folding predicate ");
2109 print_gimple_expr (dump_file, stmt, 0, 0);
2110 fprintf (dump_file, " to ");
2111 print_generic_expr (dump_file, val.value, 0);
2112 fprintf (dump_file, "\n");
2113 }
2114
94144e68 2115 if (integer_zerop (val.value))
2116 gimple_cond_make_false (stmt);
2117 else
2118 gimple_cond_make_true (stmt);
6688f8ec 2119
94144e68 2120 return true;
2121 }
6688f8ec 2122
94144e68 2123 case GIMPLE_CALL:
2124 {
2125 tree lhs = gimple_call_lhs (stmt);
3064bb7b 2126 int flags = gimple_call_flags (stmt);
15d138c9 2127 tree val;
94144e68 2128 tree argt;
2129 bool changed = false;
2130 unsigned i;
2131
 2132 /* If the call was folded into a constant, make sure it goes
2133 away even if we cannot propagate into all uses because of
2134 type issues. */
2135 if (lhs
2136 && TREE_CODE (lhs) == SSA_NAME
3064bb7b 2137 && (val = get_constant_value (lhs))
2138 /* Don't optimize away calls that have side-effects. */
2139 && (flags & (ECF_CONST|ECF_PURE)) != 0
2140 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
94144e68 2141 {
15d138c9 2142 tree new_rhs = unshare_expr (val);
338cce8f 2143 bool res;
94144e68 2144 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2145 TREE_TYPE (new_rhs)))
2146 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 2147 res = update_call_from_tree (gsi, new_rhs);
2148 gcc_assert (res);
94144e68 2149 return true;
2150 }
2151
fb049fba 2152 /* Internal calls provide no argument types, so the extra laxity
2153 for normal calls does not apply. */
2154 if (gimple_call_internal_p (stmt))
2155 return false;
2156
581bf1c2 2157 /* The heuristic of fold_builtin_alloca_with_align differs before and
2158 after inlining, so we don't require the arg to be changed into a
2159 constant for folding, but just to be constant. */
2160 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 2161 {
581bf1c2 2162 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 2163 if (new_rhs)
2164 {
2165 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 2166 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 2167 gcc_assert (res);
582a80ed 2168 insert_clobbers_for_var (*gsi, var);
6e93d308 2169 return true;
2170 }
9a65cc0a 2171 }
2172
94144e68 2173 /* Propagate into the call arguments. Compared to replace_uses_in
2174 this can use the argument slot types for type verification
2175 instead of the current argument type. We also can safely
2176 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 2177 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 2178 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2179 ++i, argt = TREE_CHAIN (argt))
2180 {
2181 tree arg = gimple_call_arg (stmt, i);
2182 if (TREE_CODE (arg) == SSA_NAME
15d138c9 2183 && (val = get_constant_value (arg))
94144e68 2184 && useless_type_conversion_p
2185 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 2186 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 2187 {
15d138c9 2188 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 2189 changed = true;
2190 }
2191 }
e16f4c39 2192
94144e68 2193 return changed;
2194 }
6688f8ec 2195
6872bf3c 2196 case GIMPLE_ASSIGN:
2197 {
2198 tree lhs = gimple_assign_lhs (stmt);
15d138c9 2199 tree val;
6872bf3c 2200
 2201 /* If we have a load that turned out to be constant, replace it
2202 as we cannot propagate into all uses in all cases. */
2203 if (gimple_assign_single_p (stmt)
2204 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2205 && (val = get_constant_value (lhs)))
6872bf3c 2206 {
15d138c9 2207 tree rhs = unshare_expr (val);
6872bf3c 2208 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 2209 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 2210 gimple_assign_set_rhs_from_tree (gsi, rhs);
2211 return true;
2212 }
2213
2214 return false;
2215 }
2216
94144e68 2217 default:
2218 return false;
2219 }
6688f8ec 2220}
2221
41511585 2222/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 2223 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2224 creates virtual definitions, set the value of each new name to that
75a70cf9 2225 of the RHS (if we can derive a constant out of the RHS).
2226 Value-returning call statements also perform an assignment, and
2227 are handled here. */
4ee9c684 2228
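/* E.g. (a sketch): for "x_2 = 4", set_lattice_value records CONSTANT 4
   for x_2, and the statement is reported as SSA_PROP_INTERESTING so the
   propagation engine revisits the uses of x_2.  */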
41511585 2229static enum ssa_prop_result
75a70cf9 2230visit_assignment (gimple stmt, tree *output_p)
4ee9c684 2231{
9908fe4d 2232 ccp_prop_value_t val;
88dbf20f 2233 enum ssa_prop_result retval;
4ee9c684 2234
75a70cf9 2235 tree lhs = gimple_get_lhs (stmt);
4ee9c684 2236
75a70cf9 2237 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2238 || gimple_call_lhs (stmt) != NULL_TREE);
2239
15d138c9 2240 if (gimple_assign_single_p (stmt)
2241 && gimple_assign_rhs_code (stmt) == SSA_NAME)
2242 /* For a simple copy operation, we copy the lattice values. */
2243 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 2244 else
75a70cf9 2245 /* Evaluate the statement, which could be
2246 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 2247 val = evaluate_stmt (stmt);
4ee9c684 2248
88dbf20f 2249 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2250
41511585 2251 /* Set the lattice value of the statement's output. */
88dbf20f 2252 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2253 {
88dbf20f 2254 /* If STMT is an assignment to an SSA_NAME, we only have one
2255 value to set. */
2256 if (set_lattice_value (lhs, val))
2257 {
2258 *output_p = lhs;
2259 if (val.lattice_val == VARYING)
2260 retval = SSA_PROP_VARYING;
2261 else
2262 retval = SSA_PROP_INTERESTING;
2263 }
4ee9c684 2264 }
88dbf20f 2265
2266 return retval;
4ee9c684 2267}
2268
4ee9c684 2269
41511585 2270/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2271 if it can determine which edge will be taken. Otherwise, return
2272 SSA_PROP_VARYING. */
2273
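/* E.g. (a sketch): if the predicate of "if (x_1 > 2)" evaluates to
   constant 1 with an all-zero mask, find_taken_edge returns the true
   edge and only that edge is simulated further.  */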
2274static enum ssa_prop_result
75a70cf9 2275visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2276{
9908fe4d 2277 ccp_prop_value_t val;
41511585 2278 basic_block block;
2279
75a70cf9 2280 block = gimple_bb (stmt);
41511585 2281 val = evaluate_stmt (stmt);
b7e55469 2282 if (val.lattice_val != CONSTANT
796b6678 2283 || val.mask != 0)
b7e55469 2284 return SSA_PROP_VARYING;
41511585 2285
2286 /* Find which edge out of the conditional block will be taken and add it
2287 to the worklist. If no single edge can be determined statically,
2288 return SSA_PROP_VARYING to feed all the outgoing edges to the
2289 propagation engine. */
b7e55469 2290 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2291 if (*taken_edge_p)
2292 return SSA_PROP_INTERESTING;
2293 else
2294 return SSA_PROP_VARYING;
4ee9c684 2295}
2296
4ee9c684 2297
41511585 2298/* Evaluate statement STMT. If the statement produces an output value and
2299 its evaluation changes the lattice value of its output, return
2300 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2301 output value.
48e1416a 2302
41511585 2303 If STMT is a conditional branch and we can determine its truth
2304 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2305 value, return SSA_PROP_VARYING. */
4ee9c684 2306
41511585 2307static enum ssa_prop_result
75a70cf9 2308ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2309{
41511585 2310 tree def;
2311 ssa_op_iter iter;
4ee9c684 2312
41511585 2313 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2314 {
88dbf20f 2315 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2316 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2317 }
4ee9c684 2318
75a70cf9 2319 switch (gimple_code (stmt))
4ee9c684 2320 {
75a70cf9 2321 case GIMPLE_ASSIGN:
2322 /* If the statement is an assignment that produces a single
2323 output value, evaluate its RHS to see if the lattice value of
2324 its output has changed. */
2325 return visit_assignment (stmt, output_p);
2326
2327 case GIMPLE_CALL:
2328 /* A value-returning call also performs an assignment. */
2329 if (gimple_call_lhs (stmt) != NULL_TREE)
2330 return visit_assignment (stmt, output_p);
2331 break;
2332
2333 case GIMPLE_COND:
2334 case GIMPLE_SWITCH:
2335 /* If STMT is a conditional branch, see if we can determine
2336 which branch will be taken. */
2337 /* FIXME. It appears that we should be able to optimize
2338 computed GOTOs here as well. */
2339 return visit_cond_stmt (stmt, taken_edge_p);
2340
2341 default:
2342 break;
4ee9c684 2343 }
4ee9c684 2344
41511585 2345 /* Any other kind of statement is not interesting for constant
2346 propagation and, therefore, not worth simulating. */
41511585 2347 if (dump_file && (dump_flags & TDF_DETAILS))
2348 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2349
41511585 2350 /* Definitions made by statements other than assignments to
2351 SSA_NAMEs represent unknown modifications to their outputs.
2352 Mark them VARYING. */
88dbf20f 2353 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2354 {
9908fe4d 2355 ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
88dbf20f 2356 set_lattice_value (def, v);
2357 }
4ee9c684 2358
41511585 2359 return SSA_PROP_VARYING;
2360}
4ee9c684 2361
4ee9c684 2362
88dbf20f 2363/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2364
33a34f1e 2365static unsigned int
61207d43 2366do_ssa_ccp (void)
41511585 2367{
582a80ed 2368 unsigned int todo = 0;
2369 calculate_dominance_info (CDI_DOMINATORS);
41511585 2370 ccp_initialize ();
2371 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2372 if (ccp_finalize ())
560965e9 2373 todo = (TODO_cleanup_cfg | TODO_update_ssa);
582a80ed 2374 free_dominance_info (CDI_DOMINATORS);
2375 return todo;
4ee9c684 2376}
2377
5664499b 2378
cbe8bda8 2379namespace {
2380
2381const pass_data pass_data_ccp =
41511585 2382{
cbe8bda8 2383 GIMPLE_PASS, /* type */
2384 "ccp", /* name */
2385 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2386 TV_TREE_CCP, /* tv_id */
2387 ( PROP_cfg | PROP_ssa ), /* properties_required */
2388 0, /* properties_provided */
2389 0, /* properties_destroyed */
2390 0, /* todo_flags_start */
8b88439e 2391 TODO_update_address_taken, /* todo_flags_finish */
41511585 2392};
4ee9c684 2393
cbe8bda8 2394class pass_ccp : public gimple_opt_pass
2395{
2396public:
9af5ce0c 2397 pass_ccp (gcc::context *ctxt)
2398 : gimple_opt_pass (pass_data_ccp, ctxt)
cbe8bda8 2399 {}
2400
2401 /* opt_pass methods: */
ae84f584 2402 opt_pass * clone () { return new pass_ccp (m_ctxt); }
31315c24 2403 virtual bool gate (function *) { return flag_tree_ccp != 0; }
65b0537f 2404 virtual unsigned int execute (function *) { return do_ssa_ccp (); }
cbe8bda8 2405
2406}; // class pass_ccp
2407
2408} // anon namespace
2409
2410gimple_opt_pass *
2411make_pass_ccp (gcc::context *ctxt)
2412{
2413 return new pass_ccp (ctxt);
2414}
2415
4ee9c684 2416
75a70cf9 2417
bdd0e199 2418/* Try to optimize out __builtin_stack_restore. It can be removed
2419 if there is another __builtin_stack_restore in the same basic
2420 block and no calls or ASM_EXPRs are in between, or if this block's
2421 only outgoing edge is to EXIT_BLOCK and there are no calls or
2422 ASM_EXPRs after this __builtin_stack_restore. */
2423
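/* E.g. (a sketch): in
     save_1 = __builtin_stack_save ();
     __builtin_stack_restore (save_1);
     ... no calls or asm statements ...
     __builtin_stack_restore (save_1);
   the first restore has no observable effect and is deleted.  */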
2424static tree
75a70cf9 2425optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2426{
6ea999da 2427 tree callee;
2428 gimple stmt;
75a70cf9 2429
2430 basic_block bb = gsi_bb (i);
2431 gimple call = gsi_stmt (i);
bdd0e199 2432
75a70cf9 2433 if (gimple_code (call) != GIMPLE_CALL
2434 || gimple_call_num_args (call) != 1
2435 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2436 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2437 return NULL_TREE;
2438
75a70cf9 2439 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2440 {
75a70cf9 2441 stmt = gsi_stmt (i);
2442 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2443 return NULL_TREE;
75a70cf9 2444 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2445 continue;
2446
75a70cf9 2447 callee = gimple_call_fndecl (stmt);
c40a6f90 2448 if (!callee
2449 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2450 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2451 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2452 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2453 return NULL_TREE;
2454
2455 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2456 goto second_stack_restore;
bdd0e199 2457 }
2458
6ea999da 2459 if (!gsi_end_p (i))
bdd0e199 2460 return NULL_TREE;
2461
6ea999da 2462 /* Allow a single successor edge to the exit block, or no successors. */
2463 switch (EDGE_COUNT (bb->succs))
2464 {
2465 case 0:
2466 break;
2467 case 1:
34154e27 2468 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
6ea999da 2469 return NULL_TREE;
2470 break;
2471 default:
2472 return NULL_TREE;
2473 }
2474 second_stack_restore:
bdd0e199 2475
6ea999da 2476 /* If there's exactly one use, then zap the call to __builtin_stack_save.
 2477 If there are multiple uses, the call will be removed only when the
 2478 last such restore is optimized. In any case, whether __builtin_stack_save
 2479 can be removed or not is irrelevant to removing __builtin_stack_restore. */
2480 if (has_single_use (gimple_call_arg (call, 0)))
2481 {
2482 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2483 if (is_gimple_call (stack_save))
2484 {
2485 callee = gimple_call_fndecl (stack_save);
2486 if (callee
2487 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2488 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2489 {
2490 gimple_stmt_iterator stack_save_gsi;
2491 tree rhs;
bdd0e199 2492
6ea999da 2493 stack_save_gsi = gsi_for_stmt (stack_save);
2494 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2495 update_call_from_tree (&stack_save_gsi, rhs);
2496 }
2497 }
2498 }
bdd0e199 2499
75a70cf9 2500 /* No effect, so the statement will be deleted. */
bdd0e199 2501 return integer_zero_node;
2502}
75a70cf9 2503
8a58ed0a 2504/* If the va_list type is a simple pointer and nothing special is needed,
 2505 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 2506 remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
 2507 into a simple pointer assignment. */
2508
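/* E.g. (a sketch, for a target whose va_list is a plain pointer):
     __builtin_va_start (&ap, 0)   becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s)     becomes   d = s;
     __builtin_va_end (&ap)        is deleted as a no-op.  */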
2509static tree
75a70cf9 2510optimize_stdarg_builtin (gimple call)
8a58ed0a 2511{
5f57a8b1 2512 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2513 bool va_list_simple_ptr;
389dd41b 2514 location_t loc = gimple_location (call);
8a58ed0a 2515
75a70cf9 2516 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2517 return NULL_TREE;
2518
75a70cf9 2519 callee = gimple_call_fndecl (call);
5f57a8b1 2520
2521 cfun_va_list = targetm.fn_abi_va_list (callee);
2522 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2523 && (TREE_TYPE (cfun_va_list) == void_type_node
2524 || TREE_TYPE (cfun_va_list) == char_type_node);
2525
8a58ed0a 2526 switch (DECL_FUNCTION_CODE (callee))
2527 {
2528 case BUILT_IN_VA_START:
2529 if (!va_list_simple_ptr
2530 || targetm.expand_builtin_va_start != NULL
e7ed5dd7 2531 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2532 return NULL_TREE;
2533
75a70cf9 2534 if (gimple_call_num_args (call) != 2)
8a58ed0a 2535 return NULL_TREE;
2536
75a70cf9 2537 lhs = gimple_call_arg (call, 0);
8a58ed0a 2538 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2539 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2540 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2541 return NULL_TREE;
48e1416a 2542
389dd41b 2543 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2544 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2545 1, integer_zero_node);
389dd41b 2546 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2547 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2548
2549 case BUILT_IN_VA_COPY:
2550 if (!va_list_simple_ptr)
2551 return NULL_TREE;
2552
75a70cf9 2553 if (gimple_call_num_args (call) != 2)
8a58ed0a 2554 return NULL_TREE;
2555
75a70cf9 2556 lhs = gimple_call_arg (call, 0);
8a58ed0a 2557 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2558 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2559 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2560 return NULL_TREE;
2561
389dd41b 2562 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2563 rhs = gimple_call_arg (call, 1);
8a58ed0a 2564 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2565 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2566 return NULL_TREE;
2567
389dd41b 2568 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2569 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2570
2571 case BUILT_IN_VA_END:
75a70cf9 2572 /* No effect, so the statement will be deleted. */
8a58ed0a 2573 return integer_zero_node;
2574
2575 default:
2576 gcc_unreachable ();
2577 }
2578}
75a70cf9 2579
f87df69a 2580/* Attempt to make the block containing the __builtin_unreachable at I
 2581 unreachable by changing the incoming jumps. Return true if at least one jump was changed. */
2582
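/* E.g. (a sketch): for
     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;
   where <bb 3> starts with __builtin_unreachable (), the condition is
   made always false so that <bb 3> loses this incoming edge.  */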
2583static bool
2584optimize_unreachable (gimple_stmt_iterator i)
2585{
2586 basic_block bb = gsi_bb (i);
2587 gimple_stmt_iterator gsi;
2588 gimple stmt;
2589 edge_iterator ei;
2590 edge e;
2591 bool ret;
2592
f6b540af 2593 if (flag_sanitize & SANITIZE_UNREACHABLE)
2594 return false;
2595
f87df69a 2596 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2597 {
2598 stmt = gsi_stmt (gsi);
2599
2600 if (is_gimple_debug (stmt))
2601 continue;
2602
2603 if (gimple_code (stmt) == GIMPLE_LABEL)
2604 {
2605 /* Verify we do not need to preserve the label. */
2606 if (FORCED_LABEL (gimple_label_label (stmt)))
2607 return false;
2608
2609 continue;
2610 }
2611
2612 /* Only handle the case that __builtin_unreachable is the first statement
2613 in the block. We rely on DCE to remove stmts without side-effects
2614 before __builtin_unreachable. */
2615 if (gsi_stmt (gsi) != gsi_stmt (i))
2616 return false;
2617 }
2618
2619 ret = false;
2620 FOR_EACH_EDGE (e, ei, bb->preds)
2621 {
2622 gsi = gsi_last_bb (e->src);
522f73a1 2623 if (gsi_end_p (gsi))
2624 continue;
f87df69a 2625
522f73a1 2626 stmt = gsi_stmt (gsi);
2627 if (gimple_code (stmt) == GIMPLE_COND)
f87df69a 2628 {
2629 if (e->flags & EDGE_TRUE_VALUE)
2630 gimple_cond_make_false (stmt);
2631 else if (e->flags & EDGE_FALSE_VALUE)
2632 gimple_cond_make_true (stmt);
2633 else
2634 gcc_unreachable ();
a03a52b4 2635 update_stmt (stmt);
f87df69a 2636 }
2637 else
2638 {
 2639 /* TODO: handle other cases, e.g. switch statements. */
2640 continue;
2641 }
2642
2643 ret = true;
2644 }
2645
2646 return ret;
2647}
2648
4ee9c684 2649/* A simple pass that attempts to fold all builtin functions. This pass
2650 is run after we've propagated as many constants as we can. */
2651
65b0537f 2652namespace {
2653
2654const pass_data pass_data_fold_builtins =
2655{
2656 GIMPLE_PASS, /* type */
2657 "fab", /* name */
2658 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 2659 TV_NONE, /* tv_id */
2660 ( PROP_cfg | PROP_ssa ), /* properties_required */
2661 0, /* properties_provided */
2662 0, /* properties_destroyed */
2663 0, /* todo_flags_start */
8b88439e 2664 TODO_update_ssa, /* todo_flags_finish */
65b0537f 2665};
2666
2667class pass_fold_builtins : public gimple_opt_pass
2668{
2669public:
2670 pass_fold_builtins (gcc::context *ctxt)
2671 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
2672 {}
2673
2674 /* opt_pass methods: */
2675 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
2676 virtual unsigned int execute (function *);
2677
2678}; // class pass_fold_builtins
2679
2680unsigned int
2681pass_fold_builtins::execute (function *fun)
4ee9c684 2682{
b36237eb 2683 bool cfg_changed = false;
4ee9c684 2684 basic_block bb;
b1b7c0c4 2685 unsigned int todoflags = 0;
48e1416a 2686
65b0537f 2687 FOR_EACH_BB_FN (bb, fun)
4ee9c684 2688 {
75a70cf9 2689 gimple_stmt_iterator i;
2690 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2691 {
75a70cf9 2692 gimple stmt, old_stmt;
40a3eb84 2693 tree callee;
0a39fd54 2694 enum built_in_function fcode;
4ee9c684 2695
75a70cf9 2696 stmt = gsi_stmt (i);
2697
2698 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2699 {
896a0c42 2700 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
2701 after the last GIMPLE DSE they aren't needed and might
2702 unnecessarily keep the SSA_NAMEs live. */
2703 if (gimple_clobber_p (stmt))
2704 {
2705 tree lhs = gimple_assign_lhs (stmt);
2706 if (TREE_CODE (lhs) == MEM_REF
2707 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2708 {
2709 unlink_stmt_vdef (stmt);
2710 gsi_remove (&i, true);
2711 release_defs (stmt);
2712 continue;
2713 }
2714 }
75a70cf9 2715 gsi_next (&i);
0a39fd54 2716 continue;
2717 }
40a3eb84 2718
75a70cf9 2719 callee = gimple_call_fndecl (stmt);
4ee9c684 2720 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2721 {
75a70cf9 2722 gsi_next (&i);
0a39fd54 2723 continue;
2724 }
5a4b7e1e 2725
40a3eb84 2726 fcode = DECL_FUNCTION_CODE (callee);
2727 if (fold_stmt (&i))
2728 ;
2729 else
2730 {
2731 tree result = NULL_TREE;
2732 switch (DECL_FUNCTION_CODE (callee))
2733 {
2734 case BUILT_IN_CONSTANT_P:
2735 /* Resolve __builtin_constant_p. If it hasn't been
2736 folded to integer_one_node by now, it's fairly
2737 certain that the value simply isn't constant. */
2738 result = integer_zero_node;
2739 break;
5a4b7e1e 2740
40a3eb84 2741 case BUILT_IN_ASSUME_ALIGNED:
2742 /* Remove __builtin_assume_aligned. */
2743 result = gimple_call_arg (stmt, 0);
2744 break;
4ee9c684 2745
40a3eb84 2746 case BUILT_IN_STACK_RESTORE:
2747 result = optimize_stack_restore (i);
2748 if (result)
2749 break;
2750 gsi_next (&i);
2751 continue;
fca0886c 2752
40a3eb84 2753 case BUILT_IN_UNREACHABLE:
2754 if (optimize_unreachable (i))
2755 cfg_changed = true;
8a58ed0a 2756 break;
8a58ed0a 2757
40a3eb84 2758 case BUILT_IN_VA_START:
2759 case BUILT_IN_VA_END:
2760 case BUILT_IN_VA_COPY:
2761 /* These shouldn't be folded before pass_stdarg. */
2762 result = optimize_stdarg_builtin (stmt);
2763 if (result)
2764 break;
2765 /* FALLTHRU */
f87df69a 2766
40a3eb84 2767 default:;
2768 }
bdd0e199 2769
40a3eb84 2770 if (!result)
2771 {
2772 gsi_next (&i);
2773 continue;
2774 }
4ee9c684 2775
40a3eb84 2776 if (!update_call_from_tree (&i, result))
2777 gimplify_and_update_call_from_tree (&i, result);
2778 }
2779
2780 todoflags |= TODO_update_address_taken;
f87df69a 2781
4ee9c684 2782 if (dump_file && (dump_flags & TDF_DETAILS))
2783 {
2784 fprintf (dump_file, "Simplified\n ");
75a70cf9 2785 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2786 }
2787
75a70cf9 2788 old_stmt = stmt;
75a70cf9 2789 stmt = gsi_stmt (i);
4c5fd53c 2790 update_stmt (stmt);
de6ed584 2791
75a70cf9 2792 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2793 && gimple_purge_dead_eh_edges (bb))
b36237eb 2794 cfg_changed = true;
4ee9c684 2795
2796 if (dump_file && (dump_flags & TDF_DETAILS))
2797 {
2798 fprintf (dump_file, "to\n ");
75a70cf9 2799 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2800 fprintf (dump_file, "\n");
2801 }
0a39fd54 2802
2803 /* Retry the same statement if it changed into another
 2804 builtin; there might be new opportunities now. */
75a70cf9 2805 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2806 {
75a70cf9 2807 gsi_next (&i);
0a39fd54 2808 continue;
2809 }
75a70cf9 2810 callee = gimple_call_fndecl (stmt);
0a39fd54 2811 if (!callee
75a70cf9 2812 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2813 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2814 gsi_next (&i);
4ee9c684 2815 }
2816 }
48e1416a 2817
b36237eb 2818 /* Delete unreachable blocks. */
b1b7c0c4 2819 if (cfg_changed)
2820 todoflags |= TODO_cleanup_cfg;
48e1416a 2821
b1b7c0c4 2822 return todoflags;
4ee9c684 2823}
2824
cbe8bda8 2825} // anon namespace
2826
2827gimple_opt_pass *
2828make_pass_fold_builtins (gcc::context *ctxt)
2829{
2830 return new pass_fold_builtins (ctxt);
2831}