/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2013 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED  ->  the initial state of the value.  This value
			   is replaced with a correct initial value
			   the first time the value is used, so the
			   rest of the pass does not need to care about
			   it.  Using this value simplifies initialization
			   of the pass, and prevents us from needlessly
			   scanning statements that are never reached.

	UNDEFINED      ->  V_i is a local variable whose definition
			   has not been processed yet.  Therefore we
			   don't yet know if its value is a constant
			   or not.

	CONSTANT       ->  V_i has been found to hold a constant
			   value C.

	VARYING        ->  V_i cannot take a constant value, or if it
			   does, it is not possible to determine it
			   at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
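
/* A small worked example of the scheme described above.  Given the
   SSA form

	x_1 = 4;
	y_2 = x_1 + 5;
	if (y_2 > 0)
	  ...

   visiting the first assignment sets x_1 to CONSTANT 4, which in turn
   makes y_2 CONSTANT 9; the predicate 9 > 0 then folds to true, so
   only the true outgoing edge is marked executable, and the final
   substitute_and_fold step replaces all uses of y_2 with 9.  */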

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "params.h"
#include "hash-table.h"
#include "wide-int-print.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};

typedef struct prop_value_d prop_value_t;
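
/* A worked example of the value/mask encoding: value = 0x10 together
   with mask = 0xf describes a quantity whose low four bits are
   unknown and whose remaining bits are known to match 0x10.  The
   possible run-time values are 0x10 through 0x1f, and the invariant
   X & ~mask == value & ~mask holds for each of them.  */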

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  wide_int cval = wi::bit_and_not (wi::to_widest (val.value),
					   val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}
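
/* For the example encoding above (value 0x10, mask 0xf) this prints
   the known bits followed by the mask in parentheses, i.e.
   "CONSTANT 0x10 (0xf)".  */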


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      widest_int nonzero_bits = get_nonzero_bits (var);
	      widest_int mask
		= wi::mask <widest_int> (TYPE_PRECISION (TREE_TYPE (var)), false);
	      if (nonzero_bits != -1 && nonzero_bits != mask)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  /* CCP wants the bits above precision set.  */
		  val.mask = nonzero_bits | ~mask;
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.

   For other constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);

  if (TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      widest_int diff = (wi::to_widest (new_val.value)
			 ^ wi::to_widest (old_val->value));
      new_val.mask = new_val.mask | old_val->mask | diff;
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || new_val.mask != old_val->mask)))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
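
/* A worked example of the mask widening above: if an earlier iteration
   stored value 0x10 with mask 0x1 and the new visit computes value
   0x12 with mask 0x1, then DIFF = 0x10 ^ 0x12 = 0x2 and the stored
   result becomes value 0x12 with mask 0x3: the bit that changed
   between iterations is no longer treated as known.  */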

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
			       tree, widest_int, widest_int,
			       tree, widest_int, widest_int);

/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
	      : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val = val.mask == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
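
/* For instance, for EXPR == &a where A is known to be 16-byte aligned
   and BITPOS is zero, the result is CONSTANT with value 0 and a mask
   whose low four bits are clear: only the four least significant bits
   of the address are known (to be zero), everything above is
   unknown.  */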

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      /* These 3 builtins use the first argument just as a magic
	 way to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a call to a
     known function that is not a builtin, it is varying.  Indirect
     calls (no fndecl) are left alone.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must make sure it
	     is simulated at least once.  Failure to do so means that
	     its outgoing edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed exceeds the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = -1;
	  const_val[i].value = NULL_TREE;
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || first_pass_instance)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
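	  /* For instance, mask ...fff0 with value 4 means the low four
	     bits are known to be 0100: the lowest set mask bit yields
	     ALIGN = 16 and the misalignment is 4.  */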
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  widest_int nonzero_bits = val->mask | wi::to_widest (val->value);
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj          = Ci         if (i == j)
		Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci           if (i == j)
	 Ci M Cj = VARYING      if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = (val1->mask | val2->mask
		    | (wi::to_widest (val1->value)
		       ^ wi::to_widest (val2->value)));
      if (val1->mask == -1)
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
	 Ci M Cj = VARYING      if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
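
/* A worked example of the INTEGER_CST meet above: meeting the fully
   known constants 0x8 and 0xc ors the disagreeing bits into the mask,
   0x8 ^ 0xc = 0x4, giving CONSTANT value 0x8 with mask 0x4: bit 3 is
   still known to be set, bit 2 has become unknown, and all other bits
   are known to be clear.  */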


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  widest_int *val, widest_int *mask,
		  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	widest_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm, type, 1, 0);
	break;
      }

    CASE_CONVERT:
      {
	signop sgn;

	/* First extend mask and value according to the original type.  */
	sgn = TYPE_SIGN (rtype);
	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

	/* Then extend mask and value according to the target type.  */
	sgn = TYPE_SIGN (type);
	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
	break;
      }

    default:
      *mask = -1;
      break;
    }
}
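
/* An example of the conversion case above: converting a signed char
   with value 0 and mask 0x80 (only the sign bit unknown) to a 32-bit
   int sign-extends the mask to 0xffffff80, so every bit the unknown
   sign bit could propagate into becomes unknown as well.  */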

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of types R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   widest_int *val, widest_int *mask,
		   tree r1type, widest_int r1val, widest_int r1mask,
		   tree r2type, widest_int r2val, widest_int r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
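      /* For instance, with r1 fully known to be 0x0f and r2 fully
	 unknown in its low byte (r2val 0, r2mask 0xff), this gives
	 mask = (0 | 0xff) & (0x0f | 0) & (0 | 0xff) = 0x0f: every bit
	 that is zero in r1 is known to be zero in the result.  */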
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
	{
	  wide_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RROTATE_EXPR)
		    code = LROTATE_EXPR;
		  else
		    code = RROTATE_EXPR;
		}
	      if (code == RROTATE_EXPR)
		{
		  *mask = wi::rrotate (r1mask, shift, width);
		  *val = wi::rrotate (r1val, shift, width);
		}
	      else
		{
		  *mask = wi::lrotate (r1mask, shift, width);
		  *val = wi::lrotate (r1val, shift, width);
		}
	    }
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask == 0)
	{
	  wide_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RSHIFT_EXPR)
		    code = LSHIFT_EXPR;
		  else
		    code = RSHIFT_EXPR;
		}
	      if (code == RSHIFT_EXPR)
		{
		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
		}
	      else
		{
		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
		}
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = wi::ext (lo, width, sgn);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
	hi = wi::ext (hi, width, sgn);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
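	/* For instance, adding the known constant 1 to a value whose
	   bit zero is unknown (r2val 0, r2mask 1) gives lo = 1 and
	   hi = 2; lo ^ hi = 3, so both bit zero and the bit the
	   unknown carry can reach become unknown in the result.  */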
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	widest_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = wi::ctz (r1val | r1mask);
	int r2tz = wi::ctz (r2val | r2mask);
	if (r1tz + r2tz >= width)
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
			     width, sgn);
	    *val = 0;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	widest_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = 0;
	    *val = ((code == EQ_EXPR) ? 0 : 1);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case LE_EXPR:
      {
	widest_int o1val, o2val, o1mask, o2mask;
	int minmax, maxmin;

	if ((code == GE_EXPR) || (code == GT_EXPR))
	  {
	    o1val = r2val;
	    o1mask = r2mask;
	    o2val = r1val;
	    o2mask = r1mask;
	    code = swap_tree_comparison (code);
	  }
	else
	  {
	    o1val = r1val;
	    o1mask = r1mask;
	    o2val = r2val;
	    o2mask = r2mask;
	  }
	/* If the most significant bits are not known we know nothing.  */
	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	sgn = TYPE_SIGN (r1type);

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
	if (maxmin < 0)  /* o1 is less than o2.  */
	  {
	    *mask = 0;
	    *val = 1;
	  }
	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = 0;
	    *val = (code == LE_EXPR ? 1 : 0);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || rval.mask == -1);
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (mask != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || r1val.mask == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || r2val.mask == -1);
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (mask != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}

/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  widest_int value, mask;
  prop_value_t val;

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || ptrval.mask == -1);
  align = gimple_call_arg (stmt, 1);
  if (!tree_fits_uhwi_p (align))
    return ptrval;
  aligni = tree_to_uhwi (align);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!tree_fits_uhwi_p (misalign))
	return ptrval;
      misaligni = tree_to_uhwi (misalign);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_wide_int (ptrval), ptrval.mask,
		     type, value_to_wide_int (alignval), alignval.mask);
  if (mask != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
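
/* For reference, a typical use of the builtin handled above in user
   code (a sketch, not part of this file):

     double *d = (double *) __builtin_assume_aligned (p, 16);

   asserts that P is 16-byte aligned, so D's lattice value gets mask
   ~0xf and value 0.  With the optional third argument, as in
   __builtin_assume_aligned (p, 16, 8), the low four bits are instead
   known to be 8.  */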
1572
75a70cf9 1573/* Evaluate statement STMT.
1574 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1575
88dbf20f 1576static prop_value_t
75a70cf9 1577evaluate_stmt (gimple stmt)
4ee9c684 1578{
88dbf20f 1579 prop_value_t val;
4f61cce6 1580 tree simplified = NULL_TREE;
88dbf20f 1581 ccp_lattice_t likelyvalue = likely_value (stmt);
b7e55469 1582 bool is_constant = false;
581bf1c2 1583 unsigned int align;
88dbf20f 1584
b7e55469 1585 if (dump_file && (dump_flags & TDF_DETAILS))
1586 {
1587 fprintf (dump_file, "which is likely ");
1588 switch (likelyvalue)
1589 {
1590 case CONSTANT:
1591 fprintf (dump_file, "CONSTANT");
1592 break;
1593 case UNDEFINED:
1594 fprintf (dump_file, "UNDEFINED");
1595 break;
1596 case VARYING:
1597 fprintf (dump_file, "VARYING");
1598 break;
1599 default:;
1600 }
1601 fprintf (dump_file, "\n");
1602 }
add6ee5e 1603
4ee9c684 1604 /* If the statement is likely to have a CONSTANT result, then try
1605 to fold the statement to determine the constant value. */
75a70cf9 1606 /* FIXME. This is the only place that we call ccp_fold.
1607 Since likely_value never returns CONSTANT for calls, we will
1608 not attempt to fold them, including builtins that may profit. */
4ee9c684 1609 if (likelyvalue == CONSTANT)
b7e55469 1610 {
1611 fold_defer_overflow_warnings ();
1612 simplified = ccp_fold (stmt);
1613 is_constant = simplified && is_gimple_min_invariant (simplified);
1614 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1615 if (is_constant)
1616 {
1617 /* The statement produced a constant value. */
1618 val.lattice_val = CONSTANT;
1619 val.value = simplified;
e913b5cd 1620 val.mask = 0;
b7e55469 1621 }
1622 }
4ee9c684 1623 /* If the statement is likely to have a VARYING result, then do not
1624 bother folding the statement. */
04236c3a 1625 else if (likelyvalue == VARYING)
75a70cf9 1626 {
590c3166 1627 enum gimple_code code = gimple_code (stmt);
75a70cf9 1628 if (code == GIMPLE_ASSIGN)
1629 {
1630 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 1631
75a70cf9 1632 /* Other cases cannot satisfy is_gimple_min_invariant
1633 without folding. */
1634 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1635 simplified = gimple_assign_rhs1 (stmt);
1636 }
1637 else if (code == GIMPLE_SWITCH)
1638 simplified = gimple_switch_index (stmt);
1639 else
a65c4d64 1640 /* These cannot satisfy is_gimple_min_invariant without folding. */
1641 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
b7e55469 1642 is_constant = simplified && is_gimple_min_invariant (simplified);
1643 if (is_constant)
1644 {
1645 /* The statement produced a constant value. */
1646 val.lattice_val = CONSTANT;
1647 val.value = simplified;
e913b5cd 1648 val.mask = 0;
b7e55469 1649 }
75a70cf9 1650 }
4ee9c684 1651
b7e55469 1652 /* Resort to simplification for bitwise tracking. */
1653 if (flag_tree_bit_ccp
939514e9 1654 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
b7e55469 1655 && !is_constant)
912f109f 1656 {
b7e55469 1657 enum gimple_code code = gimple_code (stmt);
1658 val.lattice_val = VARYING;
1659 val.value = NULL_TREE;
e913b5cd 1660 val.mask = -1;
b7e55469 1661 if (code == GIMPLE_ASSIGN)
912f109f 1662 {
b7e55469 1663 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1664 tree rhs1 = gimple_assign_rhs1 (stmt);
1665 switch (get_gimple_rhs_class (subcode))
1666 {
1667 case GIMPLE_SINGLE_RHS:
1668 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1669 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1670 val = get_value_for_expr (rhs1, true);
1671 break;
1672
1673 case GIMPLE_UNARY_RHS:
1674 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1675 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1676 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1677 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1678 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1679 break;
1680
1681 case GIMPLE_BINARY_RHS:
1682 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1683 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1684 {
e47d81e0 1685 tree lhs = gimple_assign_lhs (stmt);
b7e55469 1686 tree rhs2 = gimple_assign_rhs2 (stmt);
1687 val = bit_value_binop (subcode,
e47d81e0 1688 TREE_TYPE (lhs), rhs1, rhs2);
b7e55469 1689 }
1690 break;
1691
1692 default:;
1693 }
912f109f 1694 }
b7e55469 1695 else if (code == GIMPLE_COND)
1696 {
1697 enum tree_code code = gimple_cond_code (stmt);
1698 tree rhs1 = gimple_cond_lhs (stmt);
1699 tree rhs2 = gimple_cond_rhs (stmt);
1700 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1701 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1702 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1703 }
0b4f0116 1704 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
153c3b50 1705 {
0b4f0116 1706 tree fndecl = gimple_call_fndecl (stmt);
153c3b50 1707 switch (DECL_FUNCTION_CODE (fndecl))
1708 {
1709 case BUILT_IN_MALLOC:
1710 case BUILT_IN_REALLOC:
1711 case BUILT_IN_CALLOC:
939514e9 1712 case BUILT_IN_STRDUP:
1713 case BUILT_IN_STRNDUP:
153c3b50 1714 val.lattice_val = CONSTANT;
1715 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1716 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1717 / BITS_PER_UNIT - 1);
153c3b50 1718 break;
1719
1720 case BUILT_IN_ALLOCA:
581bf1c2 1721 case BUILT_IN_ALLOCA_WITH_ALIGN:
1722 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
f9ae6f95 1723 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
581bf1c2 1724 : BIGGEST_ALIGNMENT);
153c3b50 1725 val.lattice_val = CONSTANT;
1726 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
796b6678 1727 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
153c3b50 1728 break;
1729
939514e9 1730 /* These builtins return their first argument, unmodified. */
1731 case BUILT_IN_MEMCPY:
1732 case BUILT_IN_MEMMOVE:
1733 case BUILT_IN_MEMSET:
1734 case BUILT_IN_STRCPY:
1735 case BUILT_IN_STRNCPY:
1736 case BUILT_IN_MEMCPY_CHK:
1737 case BUILT_IN_MEMMOVE_CHK:
1738 case BUILT_IN_MEMSET_CHK:
1739 case BUILT_IN_STRCPY_CHK:
1740 case BUILT_IN_STRNCPY_CHK:
1741 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1742 break;
1743
fca0886c 1744 case BUILT_IN_ASSUME_ALIGNED:
1745 val = bit_value_assume_aligned (stmt);
1746 break;
1747
153c3b50 1748 default:;
1749 }
1750 }
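  /* A note on the encoding used above (an illustrative sketch, not part
     of the original sources): in a prop_value_t, VAL.VALUE carries the
     known bits and VAL.MASK has a 1 for every bit whose value is
     unknown.  E.g. with MALLOC_ABI_ALIGNMENT == 128 (bits, i.e. 16
     bytes), the malloc case above produces

       val.value == 0;
       val.mask  == ~(HOST_WIDE_INT) 15;

     meaning "the low four bits are known to be zero, everything else
     is unknown".  A fully known constant C has val.mask == 0, and a
     fully varying value has val.mask == -1.  */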
b7e55469 1751 is_constant = (val.lattice_val == CONSTANT);
912f109f 1752 }
1753
fc08b993 1754 if (flag_tree_bit_ccp
1755 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1756 || (!is_constant && likelyvalue != UNDEFINED))
1757 && gimple_get_lhs (stmt)
1758 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1759 {
1760 tree lhs = gimple_get_lhs (stmt);
aeb682a2 1761 widest_int nonzero_bits = get_nonzero_bits (lhs);
3a54beaf 1762 widest_int mask
1763 = wi::mask <widest_int> (TYPE_PRECISION (TREE_TYPE (lhs)), false);
aeb682a2 1764 if (nonzero_bits != -1 && nonzero_bits != mask)
fc08b993 1765 {
1766 if (!is_constant)
1767 {
1768 val.lattice_val = CONSTANT;
1769 val.value = build_zero_cst (TREE_TYPE (lhs));
1770 /* CCP wants the bits above precision set. */
1771 val.mask = nonzero_bits | ~mask;
1772 is_constant = true;
1773 }
1774 else
1775 {
aeb682a2 1776 widest_int valv = wi::to_widest (val.value);
1777 if ((valv & ~nonzero_bits & mask) != 0)
1778 val.value = wide_int_to_tree (TREE_TYPE (lhs),
1779 valv & nonzero_bits);
1780 if (nonzero_bits == 0)
1781 val.mask = 0;
fc08b993 1782 else
1783 val.mask = val.mask & (nonzero_bits | ~mask);
1784 }
1785 }
1786 }
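  /* A worked example for the get_nonzero_bits refinement above (a
     hypothetical sketch): let LHS be a 32-bit SSA name for which VRP
     recorded nonzero_bits == 0x3, so only the two low bits may ever
     be set.  If bit-CCP alone found nothing (!is_constant), the block
     yields

       val.value == 0;
       val.mask  == 0x3 | ~(widest_int) 0xffffffff;

     i.e. the two low bits are unknown, all other bits within the
     precision are known zero, and the bits above the precision are set
     as CCP expects.  If instead a constant 0x5 had been found, the
     disallowed bit is dropped and val.value becomes 0x1.  */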
1787
b7e55469 1788 if (!is_constant)
4ee9c684 1789 {
1790 /* The statement produced a nonconstant value. If the statement
88dbf20f 1791 had UNDEFINED operands, then the result of the statement
1792 should be UNDEFINED. Otherwise, the statement is VARYING. */
bfa30570 1793 if (likelyvalue == UNDEFINED)
b7e55469 1794 {
1795 val.lattice_val = likelyvalue;
e913b5cd 1796 val.mask = 0;
b7e55469 1797 }
b765fa12 1798 else
b7e55469 1799 {
1800 val.lattice_val = VARYING;
e913b5cd 1801 val.mask = -1;
b7e55469 1802 }
b765fa12 1803
88dbf20f 1804 val.value = NULL_TREE;
4ee9c684 1805 }
41511585 1806
1807 return val;
4ee9c684 1808}
1809
de6bd75e 1810typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab;
2b15d2ba 1811
582a80ed 1812/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1813 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
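/* For illustration (a sketch; the SSA names are made up): given

     saved_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_1);

   a volatile clobber of VAR is inserted so that the result reads

     saved_1 = __builtin_stack_save ();
     ...
     var ={v} {CLOBBER};
     __builtin_stack_restore (saved_1);

   Uses of SAVED_VAL reached through PHIs or SSA name copies are
   followed recursively so that every matching restore is covered.  */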
1814
1815static void
2b15d2ba 1816insert_clobber_before_stack_restore (tree saved_val, tree var,
1817 gimple_htab *visited)
582a80ed 1818{
1819 gimple stmt, clobber_stmt;
1820 tree clobber;
1821 imm_use_iterator iter;
1822 gimple_stmt_iterator i;
1823 gimple *slot;
1824
1825 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1826 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1827 {
f1f41a6c 1828 clobber = build_constructor (TREE_TYPE (var),
1829 NULL);
582a80ed 1830 TREE_THIS_VOLATILE (clobber) = 1;
1831 clobber_stmt = gimple_build_assign (var, clobber);
1832
1833 i = gsi_for_stmt (stmt);
1834 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1835 }
1836 else if (gimple_code (stmt) == GIMPLE_PHI)
1837 {
2b15d2ba 1838 if (!visited->is_created ())
1839 visited->create (10);
582a80ed 1840
2b15d2ba 1841 slot = visited->find_slot (stmt, INSERT);
582a80ed 1842 if (*slot != NULL)
1843 continue;
1844
1845 *slot = stmt;
1846 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1847 visited);
1848 }
42eed683 1849 else if (gimple_assign_ssa_name_copy_p (stmt))
1850 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
1851 visited);
582a80ed 1852 else
1853 gcc_assert (is_gimple_debug (stmt));
1854}
1855
1856/* Advance the iterator to the previous non-debug gimple statement in the same
1857 or dominating basic block. */
1858
1859static inline void
1860gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1861{
1862 basic_block dom;
1863
1864 gsi_prev_nondebug (i);
1865 while (gsi_end_p (*i))
1866 {
1867 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
34154e27 1868 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
582a80ed 1869 return;
1870
1871 *i = gsi_last_bb (dom);
1872 }
1873}
1874
1875/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1543f720 1876 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
1877
 1878 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1879 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
1880 that case the function gives up without inserting the clobbers. */
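/* The problematic shape looks roughly like this (a hypothetical
   sketch): after DOM has duplicated the save along two paths,

     bb1: s_1 = __builtin_stack_save ();   bb2: s_2 = __builtin_stack_save ();
                        \                      /
                         bb3: ... the folded alloca ...

   neither save dominates bb3, so the backward walk below runs off the
   end of the dominator chain without a match and no clobbers are
   inserted.  */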
582a80ed 1881
1882static void
1883insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1884{
582a80ed 1885 gimple stmt;
1886 tree saved_val;
2b15d2ba 1887 gimple_htab visited;
582a80ed 1888
1543f720 1889 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
582a80ed 1890 {
1891 stmt = gsi_stmt (i);
1892
1893 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1894 continue;
582a80ed 1895
1896 saved_val = gimple_call_lhs (stmt);
1897 if (saved_val == NULL_TREE)
1898 continue;
1899
1900 insert_clobber_before_stack_restore (saved_val, var, &visited);
1901 break;
1902 }
1903
2b15d2ba 1904 if (visited.is_created ())
1905 visited.dispose ();
582a80ed 1906}
1907
581bf1c2 1908/* Detects a __builtin_alloca_with_align with a constant size argument.
 1909 If one is found, declares a fixed-size array in its place and returns
 1910 the address of the array; otherwise returns NULL_TREE. */
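/* For instance (a sketch; D.1234 is a hypothetical temporary): when the
   size heuristic is satisfied,

     p_1 = __builtin_alloca_with_align (16, 64);

   (16 bytes, alignment of 64 bits) can be folded into

     unsigned char D.1234[16];   -- with DECL_ALIGN (D.1234) == 64
     p_1 = &D.1234;

   so the dynamic allocation becomes an ordinary fixed-size local.  */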
9a65cc0a 1911
1912static tree
581bf1c2 1913fold_builtin_alloca_with_align (gimple stmt)
9a65cc0a 1914{
1915 unsigned HOST_WIDE_INT size, threshold, n_elem;
1916 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 1917
1918 /* Get lhs. */
1919 lhs = gimple_call_lhs (stmt);
1920 if (lhs == NULL_TREE)
1921 return NULL_TREE;
1922
1923 /* Detect constant argument. */
1924 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 1925 if (arg == NULL_TREE
1926 || TREE_CODE (arg) != INTEGER_CST
e913b5cd 1927 || !tree_fits_uhwi_p (arg))
9a65cc0a 1928 return NULL_TREE;
6e93d308 1929
8c53c46c 1930 size = tree_to_uhwi (arg);
9a65cc0a 1931
581bf1c2 1932 /* Heuristic: don't fold large allocas. */
9a65cc0a 1933 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 1934 /* In case the alloca is located at function entry, it has the same lifetime
1935 as a declared array, so we allow a larger size. */
9a65cc0a 1936 block = gimple_block (stmt);
1937 if (!(cfun->after_inlining
1938 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1939 threshold /= 10;
1940 if (size > threshold)
1941 return NULL_TREE;
1942
1943 /* Declare array. */
1944 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1945 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 1946 array_type = build_array_type_nelts (elem_type, n_elem);
1947 var = create_tmp_var (array_type, NULL);
f9ae6f95 1948 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
3d4a0a4b 1949 {
1950 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1951 if (pi != NULL && !pi->pt.anything)
1952 {
1953 bool singleton_p;
1954 unsigned uid;
1955 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1956 gcc_assert (singleton_p);
1957 SET_DECL_PT_UID (var, uid);
1958 }
1959 }
9a65cc0a 1960
1961 /* Fold alloca to the address of the array. */
1962 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1963}
1964
6688f8ec 1965/* Fold the stmt at *GSI with CCP specific information that propagating
1966 and regular folding does not catch. */
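/* For example (a hypothetical sketch): suppose bit-CCP proved that the
   low bit of x_1 is always one, i.e. its lattice value is
   val.value == 1 with val.mask == ~(widest_int) 1.  Then a condition

     if (x_1 == 0)

   evaluates to CONSTANT zero with a zero mask and is rewritten via
   gimple_cond_make_false below, even though x_1 itself never becomes a
   propagatable constant.  */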
1967
1968static bool
1969ccp_fold_stmt (gimple_stmt_iterator *gsi)
1970{
1971 gimple stmt = gsi_stmt (*gsi);
6688f8ec 1972
94144e68 1973 switch (gimple_code (stmt))
1974 {
1975 case GIMPLE_COND:
1976 {
1977 prop_value_t val;
1978 /* Statement evaluation will handle type mismatches in constants
1979 more gracefully than the final propagation. This allows us to
1980 fold more conditionals here. */
1981 val = evaluate_stmt (stmt);
1982 if (val.lattice_val != CONSTANT
796b6678 1983 || val.mask != 0)
94144e68 1984 return false;
1985
b7e55469 1986 if (dump_file)
1987 {
1988 fprintf (dump_file, "Folding predicate ");
1989 print_gimple_expr (dump_file, stmt, 0, 0);
1990 fprintf (dump_file, " to ");
1991 print_generic_expr (dump_file, val.value, 0);
1992 fprintf (dump_file, "\n");
1993 }
1994
94144e68 1995 if (integer_zerop (val.value))
1996 gimple_cond_make_false (stmt);
1997 else
1998 gimple_cond_make_true (stmt);
6688f8ec 1999
94144e68 2000 return true;
2001 }
6688f8ec 2002
94144e68 2003 case GIMPLE_CALL:
2004 {
2005 tree lhs = gimple_call_lhs (stmt);
3064bb7b 2006 int flags = gimple_call_flags (stmt);
15d138c9 2007 tree val;
94144e68 2008 tree argt;
2009 bool changed = false;
2010 unsigned i;
2011
 2012	  /* If the call was folded into a constant, make sure it goes
2013 away even if we cannot propagate into all uses because of
2014 type issues. */
2015 if (lhs
2016 && TREE_CODE (lhs) == SSA_NAME
3064bb7b 2017 && (val = get_constant_value (lhs))
2018 /* Don't optimize away calls that have side-effects. */
2019 && (flags & (ECF_CONST|ECF_PURE)) != 0
2020 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
94144e68 2021 {
15d138c9 2022 tree new_rhs = unshare_expr (val);
338cce8f 2023 bool res;
94144e68 2024 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2025 TREE_TYPE (new_rhs)))
2026 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 2027 res = update_call_from_tree (gsi, new_rhs);
2028 gcc_assert (res);
94144e68 2029 return true;
2030 }
2031
fb049fba 2032 /* Internal calls provide no argument types, so the extra laxity
2033 for normal calls does not apply. */
2034 if (gimple_call_internal_p (stmt))
2035 return false;
2036
581bf1c2 2037	  /* The heuristic of fold_builtin_alloca_with_align differs before and
 2038	     after inlining, so we don't require the argument to have been changed
 2039	     into a constant by propagation for folding; it just has to be constant. */
2040 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 2041 {
581bf1c2 2042 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 2043 if (new_rhs)
2044 {
2045 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 2046 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 2047 gcc_assert (res);
582a80ed 2048 insert_clobbers_for_var (*gsi, var);
6e93d308 2049 return true;
2050 }
9a65cc0a 2051 }
2052
94144e68 2053 /* Propagate into the call arguments. Compared to replace_uses_in
2054 this can use the argument slot types for type verification
 2055	   instead of the current argument type.  We can also safely
2056 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 2057 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 2058 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2059 ++i, argt = TREE_CHAIN (argt))
2060 {
2061 tree arg = gimple_call_arg (stmt, i);
2062 if (TREE_CODE (arg) == SSA_NAME
15d138c9 2063 && (val = get_constant_value (arg))
94144e68 2064 && useless_type_conversion_p
2065 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 2066 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 2067 {
15d138c9 2068 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 2069 changed = true;
2070 }
2071 }
e16f4c39 2072
94144e68 2073 return changed;
2074 }
6688f8ec 2075
6872bf3c 2076 case GIMPLE_ASSIGN:
2077 {
2078 tree lhs = gimple_assign_lhs (stmt);
15d138c9 2079 tree val;
6872bf3c 2080
 2081	/* If we have a load that turned out to be constant, replace it
2082 as we cannot propagate into all uses in all cases. */
2083 if (gimple_assign_single_p (stmt)
2084 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2085 && (val = get_constant_value (lhs)))
6872bf3c 2086 {
15d138c9 2087 tree rhs = unshare_expr (val);
6872bf3c 2088 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 2089 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 2090 gimple_assign_set_rhs_from_tree (gsi, rhs);
2091 return true;
2092 }
2093
2094 return false;
2095 }
2096
94144e68 2097 default:
2098 return false;
2099 }
6688f8ec 2100}
2101
41511585 2102/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 2103 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2104 creates virtual definitions, set the value of each new name to that
75a70cf9 2105 of the RHS (if we can derive a constant out of the RHS).
2106 Value-returning call statements also perform an assignment, and
2107 are handled here. */
4ee9c684 2108
41511585 2109static enum ssa_prop_result
75a70cf9 2110visit_assignment (gimple stmt, tree *output_p)
4ee9c684 2111{
88dbf20f 2112 prop_value_t val;
88dbf20f 2113 enum ssa_prop_result retval;
4ee9c684 2114
75a70cf9 2115 tree lhs = gimple_get_lhs (stmt);
4ee9c684 2116
75a70cf9 2117 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2118 || gimple_call_lhs (stmt) != NULL_TREE);
2119
15d138c9 2120 if (gimple_assign_single_p (stmt)
2121 && gimple_assign_rhs_code (stmt) == SSA_NAME)
2122 /* For a simple copy operation, we copy the lattice values. */
2123 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 2124 else
75a70cf9 2125 /* Evaluate the statement, which could be
2126 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 2127 val = evaluate_stmt (stmt);
4ee9c684 2128
88dbf20f 2129 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2130
41511585 2131 /* Set the lattice value of the statement's output. */
88dbf20f 2132 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2133 {
88dbf20f 2134 /* If STMT is an assignment to an SSA_NAME, we only have one
2135 value to set. */
2136 if (set_lattice_value (lhs, val))
2137 {
2138 *output_p = lhs;
2139 if (val.lattice_val == VARYING)
2140 retval = SSA_PROP_VARYING;
2141 else
2142 retval = SSA_PROP_INTERESTING;
2143 }
4ee9c684 2144 }
88dbf20f 2145
2146 return retval;
4ee9c684 2147}
2148
4ee9c684 2149
41511585 2150/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2151 if it can determine which edge will be taken. Otherwise, return
2152 SSA_PROP_VARYING. */
2153
2154static enum ssa_prop_result
75a70cf9 2155visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2156{
88dbf20f 2157 prop_value_t val;
41511585 2158 basic_block block;
2159
75a70cf9 2160 block = gimple_bb (stmt);
41511585 2161 val = evaluate_stmt (stmt);
b7e55469 2162 if (val.lattice_val != CONSTANT
796b6678 2163 || val.mask != 0)
b7e55469 2164 return SSA_PROP_VARYING;
41511585 2165
2166 /* Find which edge out of the conditional block will be taken and add it
2167 to the worklist. If no single edge can be determined statically,
2168 return SSA_PROP_VARYING to feed all the outgoing edges to the
2169 propagation engine. */
b7e55469 2170 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2171 if (*taken_edge_p)
2172 return SSA_PROP_INTERESTING;
2173 else
2174 return SSA_PROP_VARYING;
4ee9c684 2175}
2176
4ee9c684 2177
41511585 2178/* Evaluate statement STMT. If the statement produces an output value and
2179 its evaluation changes the lattice value of its output, return
2180 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2181 output value.
48e1416a 2182
41511585 2183 If STMT is a conditional branch and we can determine its truth
2184 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2185 value, return SSA_PROP_VARYING. */
4ee9c684 2186
41511585 2187static enum ssa_prop_result
75a70cf9 2188ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2189{
41511585 2190 tree def;
2191 ssa_op_iter iter;
4ee9c684 2192
41511585 2193 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2194 {
88dbf20f 2195 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2196 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2197 }
4ee9c684 2198
75a70cf9 2199 switch (gimple_code (stmt))
4ee9c684 2200 {
75a70cf9 2201 case GIMPLE_ASSIGN:
2202 /* If the statement is an assignment that produces a single
2203 output value, evaluate its RHS to see if the lattice value of
2204 its output has changed. */
2205 return visit_assignment (stmt, output_p);
2206
2207 case GIMPLE_CALL:
2208 /* A value-returning call also performs an assignment. */
2209 if (gimple_call_lhs (stmt) != NULL_TREE)
2210 return visit_assignment (stmt, output_p);
2211 break;
2212
2213 case GIMPLE_COND:
2214 case GIMPLE_SWITCH:
2215 /* If STMT is a conditional branch, see if we can determine
2216 which branch will be taken. */
2217 /* FIXME. It appears that we should be able to optimize
2218 computed GOTOs here as well. */
2219 return visit_cond_stmt (stmt, taken_edge_p);
2220
2221 default:
2222 break;
4ee9c684 2223 }
4ee9c684 2224
41511585 2225 /* Any other kind of statement is not interesting for constant
2226 propagation and, therefore, not worth simulating. */
41511585 2227 if (dump_file && (dump_flags & TDF_DETAILS))
2228 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2229
41511585 2230 /* Definitions made by statements other than assignments to
2231 SSA_NAMEs represent unknown modifications to their outputs.
2232 Mark them VARYING. */
88dbf20f 2233 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2234 {
e913b5cd 2235 prop_value_t v = { VARYING, NULL_TREE, -1 };
88dbf20f 2236 set_lattice_value (def, v);
2237 }
4ee9c684 2238
41511585 2239 return SSA_PROP_VARYING;
2240}
4ee9c684 2241
4ee9c684 2242
88dbf20f 2243/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2244
33a34f1e 2245static unsigned int
61207d43 2246do_ssa_ccp (void)
41511585 2247{
582a80ed 2248 unsigned int todo = 0;
2249 calculate_dominance_info (CDI_DOMINATORS);
41511585 2250 ccp_initialize ();
2251 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2252 if (ccp_finalize ())
560965e9 2253 todo = (TODO_cleanup_cfg | TODO_update_ssa);
582a80ed 2254 free_dominance_info (CDI_DOMINATORS);
2255 return todo;
4ee9c684 2256}
2257
5664499b 2258
2259static bool
41511585 2260gate_ccp (void)
5664499b 2261{
41511585 2262 return flag_tree_ccp != 0;
5664499b 2263}
2264
4ee9c684 2265
cbe8bda8 2266namespace {
2267
2268const pass_data pass_data_ccp =
41511585 2269{
cbe8bda8 2270 GIMPLE_PASS, /* type */
2271 "ccp", /* name */
2272 OPTGROUP_NONE, /* optinfo_flags */
2273 true, /* has_gate */
2274 true, /* has_execute */
2275 TV_TREE_CCP, /* tv_id */
2276 ( PROP_cfg | PROP_ssa ), /* properties_required */
2277 0, /* properties_provided */
2278 0, /* properties_destroyed */
2279 0, /* todo_flags_start */
2280 ( TODO_verify_ssa | TODO_update_address_taken
2281 | TODO_verify_stmts ), /* todo_flags_finish */
41511585 2282};
4ee9c684 2283
cbe8bda8 2284class pass_ccp : public gimple_opt_pass
2285{
2286public:
9af5ce0c 2287 pass_ccp (gcc::context *ctxt)
2288 : gimple_opt_pass (pass_data_ccp, ctxt)
cbe8bda8 2289 {}
2290
2291 /* opt_pass methods: */
ae84f584 2292 opt_pass * clone () { return new pass_ccp (m_ctxt); }
cbe8bda8 2293 bool gate () { return gate_ccp (); }
2294 unsigned int execute () { return do_ssa_ccp (); }
2295
2296}; // class pass_ccp
2297
2298} // anon namespace
2299
2300gimple_opt_pass *
2301make_pass_ccp (gcc::context *ctxt)
2302{
2303 return new pass_ccp (ctxt);
2304}
2305
4ee9c684 2306
75a70cf9 2307
bdd0e199 2308/* Try to optimize out __builtin_stack_restore. Optimize it out
2309 if there is another __builtin_stack_restore in the same basic
2310 block and no calls or ASM_EXPRs are in between, or if this block's
2311 only outgoing edge is to EXIT_BLOCK and there are no calls or
2312 ASM_EXPRs after this __builtin_stack_restore. */
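/* For example (a sketch): in

     __builtin_stack_restore (p_1);   <-- candidate at I, removed
     x_2 = y_1 + z_1;                 <-- no calls or ASMs in between
     __builtin_stack_restore (p_1);

   the first restore is dead, because the second one re-establishes the
   same saved stack state before anything can observe the difference;
   returning integer_zero_node below makes the caller delete it.  */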
2313
2314static tree
75a70cf9 2315optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2316{
6ea999da 2317 tree callee;
2318 gimple stmt;
75a70cf9 2319
2320 basic_block bb = gsi_bb (i);
2321 gimple call = gsi_stmt (i);
bdd0e199 2322
75a70cf9 2323 if (gimple_code (call) != GIMPLE_CALL
2324 || gimple_call_num_args (call) != 1
2325 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2326 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2327 return NULL_TREE;
2328
75a70cf9 2329 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2330 {
75a70cf9 2331 stmt = gsi_stmt (i);
2332 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2333 return NULL_TREE;
75a70cf9 2334 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2335 continue;
2336
75a70cf9 2337 callee = gimple_call_fndecl (stmt);
c40a6f90 2338 if (!callee
2339 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2340 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2341 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2342 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2343 return NULL_TREE;
2344
2345 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2346 goto second_stack_restore;
bdd0e199 2347 }
2348
6ea999da 2349 if (!gsi_end_p (i))
bdd0e199 2350 return NULL_TREE;
2351
6ea999da 2352  /* Allow a single successor that is the exit block, or no successors. */
2353 switch (EDGE_COUNT (bb->succs))
2354 {
2355 case 0:
2356 break;
2357 case 1:
34154e27 2358 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
6ea999da 2359 return NULL_TREE;
2360 break;
2361 default:
2362 return NULL_TREE;
2363 }
2364 second_stack_restore:
bdd0e199 2365
6ea999da 2366 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2367 If there are multiple uses, then the last one should remove the call.
2368 In any case, whether the call to __builtin_stack_save can be removed
2369 or not is irrelevant to removing the call to __builtin_stack_restore. */
2370 if (has_single_use (gimple_call_arg (call, 0)))
2371 {
2372 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2373 if (is_gimple_call (stack_save))
2374 {
2375 callee = gimple_call_fndecl (stack_save);
2376 if (callee
2377 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2378 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2379 {
2380 gimple_stmt_iterator stack_save_gsi;
2381 tree rhs;
bdd0e199 2382
6ea999da 2383 stack_save_gsi = gsi_for_stmt (stack_save);
2384 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2385 update_call_from_tree (&stack_save_gsi, rhs);
2386 }
2387 }
2388 }
bdd0e199 2389
75a70cf9 2390 /* No effect, so the statement will be deleted. */
bdd0e199 2391 return integer_zero_node;
2392}
75a70cf9 2393
8a58ed0a 2394/* If the va_list type is a simple pointer and nothing special is needed,
 2395   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 2396   optimize __builtin_va_end (&ap) out as a NOP, and optimize
 2397   __builtin_va_copy into a simple pointer assignment. */
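/* For example (a sketch, assuming a target whose va_list really is a
   plain pointer):

     __builtin_va_start (&ap, 0);     becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&dst, src);   becomes   dst = src;
     __builtin_va_end (&ap);          is deleted as a no-op.  */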
2398
2399static tree
75a70cf9 2400optimize_stdarg_builtin (gimple call)
8a58ed0a 2401{
5f57a8b1 2402 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2403 bool va_list_simple_ptr;
389dd41b 2404 location_t loc = gimple_location (call);
8a58ed0a 2405
75a70cf9 2406 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2407 return NULL_TREE;
2408
75a70cf9 2409 callee = gimple_call_fndecl (call);
5f57a8b1 2410
2411 cfun_va_list = targetm.fn_abi_va_list (callee);
2412 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2413 && (TREE_TYPE (cfun_va_list) == void_type_node
2414 || TREE_TYPE (cfun_va_list) == char_type_node);
2415
8a58ed0a 2416 switch (DECL_FUNCTION_CODE (callee))
2417 {
2418 case BUILT_IN_VA_START:
2419 if (!va_list_simple_ptr
2420 || targetm.expand_builtin_va_start != NULL
e7ed5dd7 2421 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2422 return NULL_TREE;
2423
75a70cf9 2424 if (gimple_call_num_args (call) != 2)
8a58ed0a 2425 return NULL_TREE;
2426
75a70cf9 2427 lhs = gimple_call_arg (call, 0);
8a58ed0a 2428 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2429 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2430 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2431 return NULL_TREE;
48e1416a 2432
389dd41b 2433 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2434 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2435 1, integer_zero_node);
389dd41b 2436 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2437 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2438
2439 case BUILT_IN_VA_COPY:
2440 if (!va_list_simple_ptr)
2441 return NULL_TREE;
2442
75a70cf9 2443 if (gimple_call_num_args (call) != 2)
8a58ed0a 2444 return NULL_TREE;
2445
75a70cf9 2446 lhs = gimple_call_arg (call, 0);
8a58ed0a 2447 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2448 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2449 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2450 return NULL_TREE;
2451
389dd41b 2452 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2453 rhs = gimple_call_arg (call, 1);
8a58ed0a 2454 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2455 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2456 return NULL_TREE;
2457
389dd41b 2458 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2459 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2460
2461 case BUILT_IN_VA_END:
75a70cf9 2462 /* No effect, so the statement will be deleted. */
8a58ed0a 2463 return integer_zero_node;
2464
2465 default:
2466 gcc_unreachable ();
2467 }
2468}
75a70cf9 2469
f87df69a 2470/* Attempt to make the block containing __builtin_unreachable at I unreachable by changing
2471 the incoming jumps. Return true if at least one jump was changed. */
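/* For example (a sketch): if bb 4 starts with __builtin_unreachable and
   a predecessor ends in

     if (x_1 > 10)
       goto <bb 4>;    -- EDGE_TRUE_VALUE
     else
       goto <bb 5>;

   the condition is rewritten with gimple_cond_make_false, so bb 4 loses
   that incoming edge and a later CFG cleanup can remove it.  */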
2472
2473static bool
2474optimize_unreachable (gimple_stmt_iterator i)
2475{
2476 basic_block bb = gsi_bb (i);
2477 gimple_stmt_iterator gsi;
2478 gimple stmt;
2479 edge_iterator ei;
2480 edge e;
2481 bool ret;
2482
2483 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2484 {
2485 stmt = gsi_stmt (gsi);
2486
2487 if (is_gimple_debug (stmt))
2488 continue;
2489
2490 if (gimple_code (stmt) == GIMPLE_LABEL)
2491 {
2492 /* Verify we do not need to preserve the label. */
2493 if (FORCED_LABEL (gimple_label_label (stmt)))
2494 return false;
2495
2496 continue;
2497 }
2498
2499 /* Only handle the case that __builtin_unreachable is the first statement
2500 in the block. We rely on DCE to remove stmts without side-effects
2501 before __builtin_unreachable. */
2502 if (gsi_stmt (gsi) != gsi_stmt (i))
2503 return false;
2504 }
2505
2506 ret = false;
2507 FOR_EACH_EDGE (e, ei, bb->preds)
2508 {
2509 gsi = gsi_last_bb (e->src);
522f73a1 2510 if (gsi_end_p (gsi))
2511 continue;
f87df69a 2512
522f73a1 2513 stmt = gsi_stmt (gsi);
2514 if (gimple_code (stmt) == GIMPLE_COND)
f87df69a 2515 {
2516 if (e->flags & EDGE_TRUE_VALUE)
2517 gimple_cond_make_false (stmt);
2518 else if (e->flags & EDGE_FALSE_VALUE)
2519 gimple_cond_make_true (stmt);
2520 else
2521 gcc_unreachable ();
a03a52b4 2522 update_stmt (stmt);
f87df69a 2523 }
2524 else
2525 {
 2526	  /* TODO: handle other cases, e.g. a switch statement.  */
2527 continue;
2528 }
2529
2530 ret = true;
2531 }
2532
2533 return ret;
2534}
2535
4ee9c684 2536/* A simple pass that attempts to fold all builtin functions. This pass
2537 is run after we've propagated as many constants as we can. */
2538
2a1990e9 2539static unsigned int
4ee9c684 2540execute_fold_all_builtins (void)
2541{
b36237eb 2542 bool cfg_changed = false;
4ee9c684 2543 basic_block bb;
b1b7c0c4 2544 unsigned int todoflags = 0;
48e1416a 2545
4ee9c684 2546 FOR_EACH_BB (bb)
2547 {
75a70cf9 2548 gimple_stmt_iterator i;
2549 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2550 {
75a70cf9 2551 gimple stmt, old_stmt;
4ee9c684 2552 tree callee, result;
0a39fd54 2553 enum built_in_function fcode;
4ee9c684 2554
75a70cf9 2555 stmt = gsi_stmt (i);
2556
2557 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2558 {
896a0c42 2559	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts; after
 2560		 the last GIMPLE DSE they aren't needed and might
2561 unnecessarily keep the SSA_NAMEs live. */
2562 if (gimple_clobber_p (stmt))
2563 {
2564 tree lhs = gimple_assign_lhs (stmt);
2565 if (TREE_CODE (lhs) == MEM_REF
2566 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2567 {
2568 unlink_stmt_vdef (stmt);
2569 gsi_remove (&i, true);
2570 release_defs (stmt);
2571 continue;
2572 }
2573 }
75a70cf9 2574 gsi_next (&i);
0a39fd54 2575 continue;
2576 }
75a70cf9 2577 callee = gimple_call_fndecl (stmt);
4ee9c684 2578 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2579 {
75a70cf9 2580 gsi_next (&i);
0a39fd54 2581 continue;
2582 }
2583 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 2584
2d18b16d 2585 result = gimple_fold_builtin (stmt);
5a4b7e1e 2586
2587 if (result)
75a70cf9 2588 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 2589
4ee9c684 2590 if (!result)
2591 switch (DECL_FUNCTION_CODE (callee))
2592 {
2593 case BUILT_IN_CONSTANT_P:
2594 /* Resolve __builtin_constant_p. If it hasn't been
2595 folded to integer_one_node by now, it's fairly
2596 certain that the value simply isn't constant. */
75a70cf9 2597 result = integer_zero_node;
4ee9c684 2598 break;
2599
fca0886c 2600 case BUILT_IN_ASSUME_ALIGNED:
2601 /* Remove __builtin_assume_aligned. */
2602 result = gimple_call_arg (stmt, 0);
2603 break;
2604
bdd0e199 2605 case BUILT_IN_STACK_RESTORE:
75a70cf9 2606 result = optimize_stack_restore (i);
8a58ed0a 2607 if (result)
2608 break;
75a70cf9 2609 gsi_next (&i);
8a58ed0a 2610 continue;
2611
f87df69a 2612 case BUILT_IN_UNREACHABLE:
2613 if (optimize_unreachable (i))
2614 cfg_changed = true;
2615 break;
2616
8a58ed0a 2617 case BUILT_IN_VA_START:
2618 case BUILT_IN_VA_END:
2619 case BUILT_IN_VA_COPY:
2620 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 2621 result = optimize_stdarg_builtin (stmt);
bdd0e199 2622 if (result)
2623 break;
2624 /* FALLTHRU */
2625
4ee9c684 2626 default:
75a70cf9 2627 gsi_next (&i);
4ee9c684 2628 continue;
2629 }
2630
f87df69a 2631 if (result == NULL_TREE)
2632 break;
2633
4ee9c684 2634 if (dump_file && (dump_flags & TDF_DETAILS))
2635 {
2636 fprintf (dump_file, "Simplified\n ");
75a70cf9 2637 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2638 }
2639
75a70cf9 2640 old_stmt = stmt;
75a70cf9 2641 if (!update_call_from_tree (&i, result))
0fefde02 2642 {
2643 gimplify_and_update_call_from_tree (&i, result);
2644 todoflags |= TODO_update_address_taken;
2645 }
de6ed584 2646
75a70cf9 2647 stmt = gsi_stmt (i);
4c5fd53c 2648 update_stmt (stmt);
de6ed584 2649
75a70cf9 2650 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2651 && gimple_purge_dead_eh_edges (bb))
b36237eb 2652 cfg_changed = true;
4ee9c684 2653
2654 if (dump_file && (dump_flags & TDF_DETAILS))
2655 {
2656 fprintf (dump_file, "to\n ");
75a70cf9 2657 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2658 fprintf (dump_file, "\n");
2659 }
0a39fd54 2660
2661 /* Retry the same statement if it changed into another
 2662	     builtin; there might be new opportunities now.  */
75a70cf9 2663 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2664 {
75a70cf9 2665 gsi_next (&i);
0a39fd54 2666 continue;
2667 }
75a70cf9 2668 callee = gimple_call_fndecl (stmt);
0a39fd54 2669 if (!callee
75a70cf9 2670 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2671 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2672 gsi_next (&i);
4ee9c684 2673 }
2674 }
48e1416a 2675
b36237eb 2676 /* Delete unreachable blocks. */
b1b7c0c4 2677 if (cfg_changed)
2678 todoflags |= TODO_cleanup_cfg;
48e1416a 2679
b1b7c0c4 2680 return todoflags;
4ee9c684 2681}
2682
41511585 2683
cbe8bda8 2684namespace {
2685
2686const pass_data pass_data_fold_builtins =
4ee9c684 2687{
cbe8bda8 2688 GIMPLE_PASS, /* type */
2689 "fab", /* name */
2690 OPTGROUP_NONE, /* optinfo_flags */
2691 false, /* has_gate */
2692 true, /* has_execute */
2693 TV_NONE, /* tv_id */
2694 ( PROP_cfg | PROP_ssa ), /* properties_required */
2695 0, /* properties_provided */
2696 0, /* properties_destroyed */
2697 0, /* todo_flags_start */
2698 ( TODO_verify_ssa | TODO_update_ssa ), /* todo_flags_finish */
4ee9c684 2699};
cbe8bda8 2700
2701class pass_fold_builtins : public gimple_opt_pass
2702{
2703public:
9af5ce0c 2704 pass_fold_builtins (gcc::context *ctxt)
2705 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
cbe8bda8 2706 {}
2707
2708 /* opt_pass methods: */
ae84f584 2709 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
cbe8bda8 2710 unsigned int execute () { return execute_fold_all_builtins (); }
2711
2712}; // class pass_fold_builtins
2713
2714} // anon namespace
2715
2716gimple_opt_pass *
2717make_pass_fold_builtins (gcc::context *ctxt)
2718{
2719 return new pass_fold_builtins (ctxt);
2720}