4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
711789cc 2 Copyright (C) 2000-2013 Free Software Foundation, Inc.
4ee9c684 3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
48e1416a 7
4ee9c684 8GCC is free software; you can redistribute it and/or modify it
9under the terms of the GNU General Public License as published by the
8c4c00c1 10Free Software Foundation; either version 3, or (at your option) any
4ee9c684 11later version.
48e1416a 12
4ee9c684 13GCC is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
48e1416a 17
4ee9c684 18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
4ee9c684 21
88dbf20f 22/* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
29
bfa30570 30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
88dbf20f 37
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
42
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
45
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
49
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
51
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
 57 propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
60
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
48e1416a 65
88dbf20f 66
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main short cuts:
72
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
75
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
81
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
 84 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
86
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
94
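      For instance (an illustrative fragment, not taken from real
      code):

	if (PRED)
	  b_3 = 7;
	b_4 = PHI (b_3, b_2(D))

      If b_2(D) is the default definition of an uninitialized local,
      its UNDEFINED value is ignored by the meet operation and b_4
      can still be considered CONSTANT 7.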
95
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
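
   As a simple illustration (made-up GIMPLE, assuming nothing beyond
   the rules above), given

     x_1 = 4;
     y_2 = x_1 + 1;
     if (y_2 > 10) ...

   propagation assigns CONSTANT 4 to x_1 and CONSTANT 5 to y_2, and
   the final substitution folds the predicate to false, so the edge
   into the true arm becomes dead.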
100
e913b5cd 101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
 107 but this causes no loss of information since these variables will
108 never be extended.
109
4ee9c684 110 References:
111
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
114
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
117
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
120
121#include "config.h"
122#include "system.h"
123#include "coretypes.h"
124#include "tm.h"
4ee9c684 125#include "tree.h"
41511585 126#include "flags.h"
4ee9c684 127#include "tm_p.h"
4ee9c684 128#include "basic-block.h"
41511585 129#include "function.h"
ce084dfc 130#include "gimple-pretty-print.h"
41511585 131#include "tree-flow.h"
4ee9c684 132#include "tree-pass.h"
41511585 133#include "tree-ssa-propagate.h"
5a4b7e1e 134#include "value-prof.h"
41511585 135#include "langhooks.h"
8782adcf 136#include "target.h"
0b205f4c 137#include "diagnostic-core.h"
43fb76c1 138#include "dbgcnt.h"
1d0b727d 139#include "gimple-fold.h"
9a65cc0a 140#include "params.h"
2b15d2ba 141#include "hash-table.h"
e913b5cd 142#include "wide-int-print.h"
4ee9c684 143
144/* Possible lattice values. */
145typedef enum
146{
bfa30570 147 UNINITIALIZED,
4ee9c684 148 UNDEFINED,
149 CONSTANT,
150 VARYING
88dbf20f 151} ccp_lattice_t;
4ee9c684 152
14f101cf 153struct prop_value_d {
154 /* Lattice value. */
155 ccp_lattice_t lattice_val;
156
157 /* Propagated value. */
158 tree value;
b7e55469 159
e913b5cd 160 /* Mask that applies to the propagated value during CCP. For X
161 with a CONSTANT lattice value X & ~mask == value & ~mask. The
162 zero bits in the mask cover constant values. The ones mean no
163 information. */
164 max_wide_int mask;
14f101cf 165};
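
/* A worked example of the VALUE/MASK pair (illustrative numbers
   only): value == 8 (0b1000) with mask == 3 (0b0011) describes any X
   of the form 0b10xx.  Bits 3 and 2 are known, bits 1 and 0 carry no
   information, and X & ~mask == value & ~mask holds for each of
   8, 9, 10 and 11.  */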
166
167typedef struct prop_value_d prop_value_t;
168
88dbf20f 169/* Array of propagated constant values. After propagation,
 170 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). */
20140406 175static prop_value_t *const_val;
285df01b 176static unsigned n_const_val;
4ee9c684 177
4af351a8 178static void canonicalize_float_value (prop_value_t *);
6688f8ec 179static bool ccp_fold_stmt (gimple_stmt_iterator *);
4af351a8 180
88dbf20f 181/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 182
183static void
88dbf20f 184dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
01406fc0 185{
41511585 186 switch (val.lattice_val)
01406fc0 187 {
88dbf20f 188 case UNINITIALIZED:
189 fprintf (outf, "%sUNINITIALIZED", prefix);
190 break;
41511585 191 case UNDEFINED:
192 fprintf (outf, "%sUNDEFINED", prefix);
193 break;
194 case VARYING:
195 fprintf (outf, "%sVARYING", prefix);
196 break;
41511585 197 case CONSTANT:
b7e55469 198 if (TREE_CODE (val.value) != INTEGER_CST
e913b5cd 199 || val.mask.zero_p ())
16ab4e97 200 {
201 fprintf (outf, "%sCONSTANT ", prefix);
202 print_generic_expr (outf, val.value, dump_flags);
203 }
b7e55469 204 else
205 {
e913b5cd 206 wide_int cval = (max_wide_int (val.value)
207 .and_not (val.mask));
208 fprintf (outf, "%sCONSTANT ", prefix);
209 print_hex (cval, outf);
210 fprintf (outf, " (");
211 print_hex (val.mask, outf);
212 fprintf (outf, ")");
b7e55469 213 }
41511585 214 break;
215 default:
8c0963c4 216 gcc_unreachable ();
41511585 217 }
01406fc0 218}
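
/* For a partially constant INTEGER_CST the function above prints the
   known bits followed by the mask in parentheses, e.g. (sample
   output only) "CONSTANT 0x8 (0x3)" for a value of the form
   0b10xx.  */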
4ee9c684 219
4ee9c684 220
88dbf20f 221/* Print lattice value VAL to stderr. */
222
223void debug_lattice_value (prop_value_t val);
224
4b987fac 225DEBUG_FUNCTION void
88dbf20f 226debug_lattice_value (prop_value_t val)
227{
228 dump_lattice_value (stderr, "", val);
229 fprintf (stderr, "\n");
230}
4ee9c684 231
4ee9c684 232
88dbf20f 233/* Compute a default value for variable VAR and store it in the
234 CONST_VAL array. The following rules are used to get default
235 values:
01406fc0 236
88dbf20f 237 1- Global and static variables that are declared constant are
238 considered CONSTANT.
239
240 2- Any other value is considered UNDEFINED. This is useful when
41511585 241 considering PHI nodes. PHI arguments that are undefined do not
242 change the constant value of the PHI node, which allows for more
88dbf20f 243 constants to be propagated.
4ee9c684 244
8883e700 245 3- Variables defined by statements other than assignments and PHI
88dbf20f 246 nodes are considered VARYING.
4ee9c684 247
8883e700 248 4- Initial values of variables that are not GIMPLE registers are
bfa30570 249 considered VARYING. */
4ee9c684 250
88dbf20f 251static prop_value_t
252get_default_value (tree var)
253{
e913b5cd 254 prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
8edeb88b 255 gimple stmt;
256
257 stmt = SSA_NAME_DEF_STMT (var);
258
259 if (gimple_nop_p (stmt))
4ee9c684 260 {
8edeb88b 261 /* Variables defined by an empty statement are those used
262 before being initialized. If VAR is a local variable, we
263 can assume initially that it is UNDEFINED, otherwise we must
264 consider it VARYING. */
7c782c9b 265 if (!virtual_operand_p (var)
266 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
8edeb88b 267 val.lattice_val = UNDEFINED;
268 else
b7e55469 269 {
270 val.lattice_val = VARYING;
e913b5cd 271 val.mask = -1;
b7e55469 272 }
4ee9c684 273 }
b45b214a 274 else if (is_gimple_assign (stmt))
41511585 275 {
8edeb88b 276 tree cst;
277 if (gimple_assign_single_p (stmt)
278 && DECL_P (gimple_assign_rhs1 (stmt))
279 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
88dbf20f 280 {
8edeb88b 281 val.lattice_val = CONSTANT;
282 val.value = cst;
88dbf20f 283 }
284 else
b45b214a 285 {
286 /* Any other variable defined by an assignment is considered
287 UNDEFINED. */
288 val.lattice_val = UNDEFINED;
289 }
290 }
291 else if ((is_gimple_call (stmt)
292 && gimple_call_lhs (stmt) != NULL_TREE)
293 || gimple_code (stmt) == GIMPLE_PHI)
294 {
295 /* A variable defined by a call or a PHI node is considered
296 UNDEFINED. */
297 val.lattice_val = UNDEFINED;
8edeb88b 298 }
299 else
300 {
301 /* Otherwise, VAR will never take on a constant value. */
302 val.lattice_val = VARYING;
e913b5cd 303 val.mask = -1;
41511585 304 }
4ee9c684 305
41511585 306 return val;
307}
4ee9c684 308
4ee9c684 309
bfa30570 310/* Get the constant value associated with variable VAR. */
4ee9c684 311
bfa30570 312static inline prop_value_t *
313get_value (tree var)
88dbf20f 314{
e004838d 315 prop_value_t *val;
bfa30570 316
285df01b 317 if (const_val == NULL
318 || SSA_NAME_VERSION (var) >= n_const_val)
e004838d 319 return NULL;
320
321 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 322 if (val->lattice_val == UNINITIALIZED)
4ee9c684 323 *val = get_default_value (var);
324
4af351a8 325 canonicalize_float_value (val);
326
4ee9c684 327 return val;
328}
329
15d138c9 330/* Return the constant tree value associated with VAR. */
331
332static inline tree
333get_constant_value (tree var)
334{
98d92e3c 335 prop_value_t *val;
336 if (TREE_CODE (var) != SSA_NAME)
337 {
338 if (is_gimple_min_invariant (var))
339 return var;
340 return NULL_TREE;
341 }
342 val = get_value (var);
b7e55469 343 if (val
344 && val->lattice_val == CONSTANT
345 && (TREE_CODE (val->value) != INTEGER_CST
e913b5cd 346 || val->mask.zero_p ()))
15d138c9 347 return val->value;
348 return NULL_TREE;
349}
350
bfa30570 351/* Sets the value associated with VAR to VARYING. */
352
353static inline void
354set_value_varying (tree var)
355{
356 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
357
358 val->lattice_val = VARYING;
359 val->value = NULL_TREE;
e913b5cd 360 val->mask = -1;
bfa30570 361}
4ee9c684 362
b31eb493 363/* For float types, modify the value of VAL to make ccp work correctly
364 for non-standard values (-0, NaN):
365
366 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
367 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
368 This is to fix the following problem (see PR 29921): Suppose we have
369
370 x = 0.0 * y
371
372 and we set value of y to NaN. This causes value of x to be set to NaN.
373 When we later determine that y is in fact VARYING, fold uses the fact
374 that HONOR_NANS is false, and we try to change the value of x to 0,
375 causing an ICE. With HONOR_NANS being false, the real appearance of
376 NaN would cause undefined behavior, though, so claiming that y (and x)
377 are UNDEFINED initially is correct. */
378
379static void
380canonicalize_float_value (prop_value_t *val)
381{
382 enum machine_mode mode;
383 tree type;
384 REAL_VALUE_TYPE d;
385
386 if (val->lattice_val != CONSTANT
387 || TREE_CODE (val->value) != REAL_CST)
388 return;
389
390 d = TREE_REAL_CST (val->value);
391 type = TREE_TYPE (val->value);
392 mode = TYPE_MODE (type);
393
394 if (!HONOR_SIGNED_ZEROS (mode)
395 && REAL_VALUE_MINUS_ZERO (d))
396 {
397 val->value = build_real (type, dconst0);
398 return;
399 }
400
401 if (!HONOR_NANS (mode)
402 && REAL_VALUE_ISNAN (d))
403 {
404 val->lattice_val = UNDEFINED;
405 val->value = NULL;
b31eb493 406 return;
407 }
408}
409
b7e55469 410/* Return whether the lattice transition is valid. */
411
412static bool
413valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
414{
415 /* Lattice transitions must always be monotonically increasing in
416 value. */
417 if (old_val.lattice_val < new_val.lattice_val)
418 return true;
419
420 if (old_val.lattice_val != new_val.lattice_val)
421 return false;
422
423 if (!old_val.value && !new_val.value)
424 return true;
425
426 /* Now both lattice values are CONSTANT. */
427
43c92e0a 428 /* Allow transitioning from PHI <&x, not executable> == &x
429 to PHI <&x, &y> == common alignment. */
b7e55469 430 if (TREE_CODE (old_val.value) != INTEGER_CST
431 && TREE_CODE (new_val.value) == INTEGER_CST)
432 return true;
433
434 /* Bit-lattices have to agree in the still valid bits. */
435 if (TREE_CODE (old_val.value) == INTEGER_CST
436 && TREE_CODE (new_val.value) == INTEGER_CST)
e913b5cd 437 return (max_wide_int (old_val.value).and_not (new_val.mask)
438 == max_wide_int (new_val.value).and_not (new_val.mask));
b7e55469 439
440 /* Otherwise constant values have to agree. */
441 return operand_equal_p (old_val.value, new_val.value, 0);
442}
443
88dbf20f 444/* Set the value for variable VAR to NEW_VAL. Return true if the new
445 value is different from VAR's previous value. */
4ee9c684 446
41511585 447static bool
88dbf20f 448set_lattice_value (tree var, prop_value_t new_val)
4ee9c684 449{
6d0bf6d6 450 /* We can deal with old UNINITIALIZED values just fine here. */
451 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
88dbf20f 452
b31eb493 453 canonicalize_float_value (&new_val);
454
b7e55469 455 /* We have to be careful to not go up the bitwise lattice
456 represented by the mask.
457 ??? This doesn't seem to be the best place to enforce this. */
458 if (new_val.lattice_val == CONSTANT
459 && old_val->lattice_val == CONSTANT
460 && TREE_CODE (new_val.value) == INTEGER_CST
461 && TREE_CODE (old_val->value) == INTEGER_CST)
462 {
e913b5cd 463 max_wide_int diff = max_wide_int (new_val.value) ^ old_val->value;
cf8f0e63 464 new_val.mask = new_val.mask | old_val->mask | diff;
b7e55469 465 }
bfa30570 466
b7e55469 467 gcc_assert (valid_lattice_transition (*old_val, new_val));
88dbf20f 468
b7e55469 469 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
470 caller that this was a non-transition. */
471 if (old_val->lattice_val != new_val.lattice_val
472 || (new_val.lattice_val == CONSTANT
473 && TREE_CODE (new_val.value) == INTEGER_CST
474 && (TREE_CODE (old_val->value) != INTEGER_CST
e913b5cd 475 || new_val.mask
476 != old_val->mask)))
4ee9c684 477 {
b7e55469 478 /* ??? We would like to delay creation of INTEGER_CSTs from
479 partially constants here. */
480
41511585 481 if (dump_file && (dump_flags & TDF_DETAILS))
482 {
88dbf20f 483 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
bfa30570 484 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
41511585 485 }
486
88dbf20f 487 *old_val = new_val;
488
6d0bf6d6 489 gcc_assert (new_val.lattice_val != UNINITIALIZED);
bfa30570 490 return true;
4ee9c684 491 }
41511585 492
493 return false;
4ee9c684 494}
495
b7e55469 496static prop_value_t get_value_for_expr (tree, bool);
497static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
e913b5cd 498static void bit_value_binop_1 (enum tree_code, tree, max_wide_int *, max_wide_int *,
499 tree, max_wide_int, max_wide_int,
500 tree, max_wide_int, max_wide_int);
b7e55469 501
e913b5cd 502/* Return a max_wide_int that can be used for bitwise simplifications
b7e55469 503 from VAL. */
504
e913b5cd 505static max_wide_int
506value_to_wide_int (prop_value_t val)
b7e55469 507{
508 if (val.value
509 && TREE_CODE (val.value) == INTEGER_CST)
e913b5cd 510 return val.value;
511
512 return 0;
b7e55469 513}
514
515/* Return the value for the address expression EXPR based on alignment
516 information. */
6d0bf6d6 517
518static prop_value_t
b7e55469 519get_value_from_alignment (tree expr)
520{
f8abb542 521 tree type = TREE_TYPE (expr);
b7e55469 522 prop_value_t val;
f8abb542 523 unsigned HOST_WIDE_INT bitpos;
524 unsigned int align;
b7e55469 525
526 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
527
59da1bcd 528 get_pointer_alignment_1 (expr, &align, &bitpos);
cf8f0e63 529 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
e913b5cd 530 ? max_wide_int::mask (TYPE_PRECISION (type), false)
531 : -1).and_not (align / BITS_PER_UNIT - 1);
532 val.lattice_val = val.mask.minus_one_p () ? VARYING : CONSTANT;
f8abb542 533 if (val.lattice_val == CONSTANT)
e913b5cd 534 val.value = wide_int_to_tree (type, bitpos / BITS_PER_UNIT);
b7e55469 535 else
f8abb542 536 val.value = NULL_TREE;
b7e55469 537
538 return val;
539}
540
541/* Return the value for the tree operand EXPR. If FOR_BITS_P is true
542 return constant bits extracted from alignment information for
543 invariant addresses. */
544
545static prop_value_t
546get_value_for_expr (tree expr, bool for_bits_p)
6d0bf6d6 547{
548 prop_value_t val;
549
550 if (TREE_CODE (expr) == SSA_NAME)
b7e55469 551 {
552 val = *get_value (expr);
553 if (for_bits_p
554 && val.lattice_val == CONSTANT
555 && TREE_CODE (val.value) == ADDR_EXPR)
556 val = get_value_from_alignment (val.value);
557 }
558 else if (is_gimple_min_invariant (expr)
559 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
6d0bf6d6 560 {
561 val.lattice_val = CONSTANT;
562 val.value = expr;
e913b5cd 563 val.mask = 0;
6d0bf6d6 564 canonicalize_float_value (&val);
565 }
b7e55469 566 else if (TREE_CODE (expr) == ADDR_EXPR)
567 val = get_value_from_alignment (expr);
6d0bf6d6 568 else
569 {
570 val.lattice_val = VARYING;
e913b5cd 571 val.mask = 1;
6d0bf6d6 572 val.value = NULL_TREE;
573 }
6d0bf6d6 574 return val;
575}
576
88dbf20f 577/* Return the likely CCP lattice value for STMT.
4ee9c684 578
41511585 579 If STMT has no operands, then return CONSTANT.
4ee9c684 580
d61b9af3 581 Else if undefinedness of operands of STMT cause its value to be
582 undefined, then return UNDEFINED.
4ee9c684 583
41511585 584 Else if any operands of STMT are constants, then return CONSTANT.
4ee9c684 585
41511585 586 Else return VARYING. */
4ee9c684 587
88dbf20f 588static ccp_lattice_t
75a70cf9 589likely_value (gimple stmt)
41511585 590{
d61b9af3 591 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
41511585 592 tree use;
593 ssa_op_iter iter;
8edeb88b 594 unsigned i;
4ee9c684 595
590c3166 596 enum gimple_code code = gimple_code (stmt);
75a70cf9 597
598 /* This function appears to be called only for assignments, calls,
599 conditionals, and switches, due to the logic in visit_stmt. */
600 gcc_assert (code == GIMPLE_ASSIGN
601 || code == GIMPLE_CALL
602 || code == GIMPLE_COND
603 || code == GIMPLE_SWITCH);
88dbf20f 604
605 /* If the statement has volatile operands, it won't fold to a
606 constant value. */
75a70cf9 607 if (gimple_has_volatile_ops (stmt))
88dbf20f 608 return VARYING;
609
75a70cf9 610 /* Arrive here for more complex cases. */
bfa30570 611 has_constant_operand = false;
d61b9af3 612 has_undefined_operand = false;
613 all_undefined_operands = true;
8edeb88b 614 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
41511585 615 {
bfa30570 616 prop_value_t *val = get_value (use);
41511585 617
bfa30570 618 if (val->lattice_val == UNDEFINED)
d61b9af3 619 has_undefined_operand = true;
620 else
621 all_undefined_operands = false;
88dbf20f 622
41511585 623 if (val->lattice_val == CONSTANT)
bfa30570 624 has_constant_operand = true;
4ee9c684 625 }
41511585 626
dd277d48 627 /* There may be constants in regular rhs operands. For calls we
628 have to ignore lhs, fndecl and static chain, otherwise only
629 the lhs. */
630 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
8edeb88b 631 i < gimple_num_ops (stmt); ++i)
632 {
633 tree op = gimple_op (stmt, i);
634 if (!op || TREE_CODE (op) == SSA_NAME)
635 continue;
636 if (is_gimple_min_invariant (op))
637 has_constant_operand = true;
638 }
639
87c0a9fc 640 if (has_constant_operand)
641 all_undefined_operands = false;
642
3d483a94 643 if (has_undefined_operand
644 && code == GIMPLE_CALL
645 && gimple_call_internal_p (stmt))
646 switch (gimple_call_internal_fn (stmt))
647 {
648 /* These 3 builtins use the first argument just as a magic
 649 way to find out a decl uid. */
650 case IFN_GOMP_SIMD_LANE:
651 case IFN_GOMP_SIMD_VF:
652 case IFN_GOMP_SIMD_LAST_LANE:
653 has_undefined_operand = false;
654 break;
655 default:
656 break;
657 }
658
d61b9af3 659 /* If the operation combines operands like COMPLEX_EXPR make sure to
660 not mark the result UNDEFINED if only one part of the result is
661 undefined. */
75a70cf9 662 if (has_undefined_operand && all_undefined_operands)
d61b9af3 663 return UNDEFINED;
75a70cf9 664 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
d61b9af3 665 {
75a70cf9 666 switch (gimple_assign_rhs_code (stmt))
d61b9af3 667 {
668 /* Unary operators are handled with all_undefined_operands. */
669 case PLUS_EXPR:
670 case MINUS_EXPR:
d61b9af3 671 case POINTER_PLUS_EXPR:
d61b9af3 672 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
673 Not bitwise operators, one VARYING operand may specify the
674 result completely. Not logical operators for the same reason.
05a936a0 675 Not COMPLEX_EXPR as one VARYING operand makes the result partly
676 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
677 the undefined operand may be promoted. */
d61b9af3 678 return UNDEFINED;
679
43c92e0a 680 case ADDR_EXPR:
681 /* If any part of an address is UNDEFINED, like the index
682 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
683 return UNDEFINED;
684
d61b9af3 685 default:
686 ;
687 }
688 }
 689 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
c91fedc5 690 fall back to CONSTANT. During iteration UNDEFINED may still drop
691 to CONSTANT. */
d61b9af3 692 if (has_undefined_operand)
c91fedc5 693 return CONSTANT;
d61b9af3 694
8edeb88b 695 /* We do not consider virtual operands here -- load from read-only
696 memory may have only VARYING virtual operands, but still be
697 constant. */
bfa30570 698 if (has_constant_operand
8edeb88b 699 || gimple_references_memory_p (stmt))
88dbf20f 700 return CONSTANT;
701
bfa30570 702 return VARYING;
4ee9c684 703}
704
bfa30570 705/* Returns true if STMT cannot be constant. */
706
707static bool
75a70cf9 708surely_varying_stmt_p (gimple stmt)
bfa30570 709{
710 /* If the statement has operands that we cannot handle, it cannot be
711 constant. */
75a70cf9 712 if (gimple_has_volatile_ops (stmt))
bfa30570 713 return true;
714
f257af64 715 /* If it is a call that does not return a value, or a direct
 716 call to a function that is not a builtin, it is varying. */
75a70cf9 717 if (is_gimple_call (stmt))
f257af64 718 {
719 tree fndecl;
720 if (!gimple_call_lhs (stmt)
721 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5768aeb3 722 && !DECL_BUILT_IN (fndecl)))
f257af64 723 return true;
724 }
bfa30570 725
8edeb88b 726 /* Any other store operation is not interesting. */
dd277d48 727 else if (gimple_vdef (stmt))
8edeb88b 728 return true;
729
bfa30570 730 /* Anything other than assignments and conditional jumps are not
731 interesting for CCP. */
75a70cf9 732 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 733 && gimple_code (stmt) != GIMPLE_COND
734 && gimple_code (stmt) != GIMPLE_SWITCH
735 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 736 return true;
737
738 return false;
739}
4ee9c684 740
41511585 741/* Initialize local data structures for CCP. */
4ee9c684 742
743static void
41511585 744ccp_initialize (void)
4ee9c684 745{
41511585 746 basic_block bb;
4ee9c684 747
285df01b 748 n_const_val = num_ssa_names;
749 const_val = XCNEWVEC (prop_value_t, n_const_val);
4ee9c684 750
41511585 751 /* Initialize simulation flags for PHI nodes and statements. */
752 FOR_EACH_BB (bb)
4ee9c684 753 {
75a70cf9 754 gimple_stmt_iterator i;
4ee9c684 755
75a70cf9 756 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
41511585 757 {
75a70cf9 758 gimple stmt = gsi_stmt (i);
2193544e 759 bool is_varying;
760
 761 /* If the statement is a control insn, then we must make sure
 762 it is simulated at least once. Failing to do so means that
 763 its outgoing edges will never get added. */
764 if (stmt_ends_bb_p (stmt))
765 is_varying = false;
766 else
767 is_varying = surely_varying_stmt_p (stmt);
4ee9c684 768
bfa30570 769 if (is_varying)
41511585 770 {
88dbf20f 771 tree def;
772 ssa_op_iter iter;
773
774 /* If the statement will not produce a constant, mark
775 all its outputs VARYING. */
776 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
8edeb88b 777 set_value_varying (def);
41511585 778 }
75a70cf9 779 prop_set_simulate_again (stmt, !is_varying);
41511585 780 }
4ee9c684 781 }
782
75a70cf9 783 /* Now process PHI nodes. We never clear the simulate_again flag on
784 phi nodes, since we do not know which edges are executable yet,
785 except for phi nodes for virtual operands when we do not do store ccp. */
41511585 786 FOR_EACH_BB (bb)
4ee9c684 787 {
75a70cf9 788 gimple_stmt_iterator i;
41511585 789
75a70cf9 790 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
791 {
792 gimple phi = gsi_stmt (i);
793
7c782c9b 794 if (virtual_operand_p (gimple_phi_result (phi)))
75a70cf9 795 prop_set_simulate_again (phi, false);
bfa30570 796 else
75a70cf9 797 prop_set_simulate_again (phi, true);
41511585 798 }
4ee9c684 799 }
41511585 800}
4ee9c684 801
43fb76c1 802/* Debug count support. Reset the lattice value of each SSA name to
 803 VARYING once the total number of SSA names analyzed goes beyond
 804 the specified debug count. */
805
806static void
807do_dbg_cnt (void)
808{
809 unsigned i;
810 for (i = 0; i < num_ssa_names; i++)
811 {
812 if (!dbg_cnt (ccp))
813 {
814 const_val[i].lattice_val = VARYING;
e913b5cd 815 const_val[i].mask = -1;
43fb76c1 816 const_val[i].value = NULL_TREE;
817 }
818 }
819}
820
4ee9c684 821
88dbf20f 822/* Do final substitution of propagated values, cleanup the flowgraph and
48e1416a 823 free allocated storage.
4ee9c684 824
33a34f1e 825 Return TRUE when something was optimized. */
826
827static bool
88dbf20f 828ccp_finalize (void)
4ee9c684 829{
43fb76c1 830 bool something_changed;
153c3b50 831 unsigned i;
43fb76c1 832
833 do_dbg_cnt ();
153c3b50 834
835 /* Derive alignment and misalignment information from partially
836 constant pointers in the lattice. */
837 for (i = 1; i < num_ssa_names; ++i)
838 {
839 tree name = ssa_name (i);
840 prop_value_t *val;
153c3b50 841 unsigned int tem, align;
842
843 if (!name
844 || !POINTER_TYPE_P (TREE_TYPE (name)))
845 continue;
846
847 val = get_value (name);
848 if (val->lattice_val != CONSTANT
849 || TREE_CODE (val->value) != INTEGER_CST)
850 continue;
851
852 /* Trailing constant bits specify the alignment, trailing value
853 bits the misalignment. */
e913b5cd 854 tem = val->mask.to_uhwi ();
153c3b50 855 align = (tem & -tem);
ceea063b 856 if (align > 1)
857 set_ptr_info_alignment (get_ptr_info (name), align,
e913b5cd 858 tree_to_hwi (val->value) & (align - 1));
153c3b50 859 }
860
88dbf20f 861 /* Perform substitutions based on the known constant values. */
14f101cf 862 something_changed = substitute_and_fold (get_constant_value,
863 ccp_fold_stmt, true);
4ee9c684 864
88dbf20f 865 free (const_val);
e004838d 866 const_val = NULL;
33a34f1e 867 return something_changed;
4ee9c684 868}
869
870
88dbf20f 871/* Compute the meet operator between *VAL1 and *VAL2. Store the result
872 in VAL1.
873
874 any M UNDEFINED = any
88dbf20f 875 any M VARYING = VARYING
876 Ci M Cj = Ci if (i == j)
877 Ci M Cj = VARYING if (i != j)
bfa30570 878 */
4ee9c684 879
880static void
88dbf20f 881ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
4ee9c684 882{
88dbf20f 883 if (val1->lattice_val == UNDEFINED)
4ee9c684 884 {
88dbf20f 885 /* UNDEFINED M any = any */
886 *val1 = *val2;
41511585 887 }
88dbf20f 888 else if (val2->lattice_val == UNDEFINED)
92481a4d 889 {
88dbf20f 890 /* any M UNDEFINED = any
891 Nothing to do. VAL1 already contains the value we want. */
892 ;
92481a4d 893 }
88dbf20f 894 else if (val1->lattice_val == VARYING
895 || val2->lattice_val == VARYING)
41511585 896 {
88dbf20f 897 /* any M VARYING = VARYING. */
898 val1->lattice_val = VARYING;
e913b5cd 899 val1->mask = -1;
88dbf20f 900 val1->value = NULL_TREE;
41511585 901 }
b7e55469 902 else if (val1->lattice_val == CONSTANT
903 && val2->lattice_val == CONSTANT
904 && TREE_CODE (val1->value) == INTEGER_CST
905 && TREE_CODE (val2->value) == INTEGER_CST)
906 {
907 /* Ci M Cj = Ci if (i == j)
908 Ci M Cj = VARYING if (i != j)
909
910 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
911 drop to varying. */
e913b5cd 912 val1->mask = (val1->mask | val2->mask
913 | (max_wide_int (val1->value)
914 ^ val2->value));
915 if (val1->mask.minus_one_p ())
b7e55469 916 {
917 val1->lattice_val = VARYING;
918 val1->value = NULL_TREE;
919 }
920 }
88dbf20f 921 else if (val1->lattice_val == CONSTANT
922 && val2->lattice_val == CONSTANT
61207d43 923 && simple_cst_equal (val1->value, val2->value) == 1)
41511585 924 {
88dbf20f 925 /* Ci M Cj = Ci if (i == j)
926 Ci M Cj = VARYING if (i != j)
927
b7e55469 928 VAL1 already contains the value we want for equivalent values. */
929 }
930 else if (val1->lattice_val == CONSTANT
931 && val2->lattice_val == CONSTANT
932 && (TREE_CODE (val1->value) == ADDR_EXPR
933 || TREE_CODE (val2->value) == ADDR_EXPR))
934 {
935 /* When not equal addresses are involved try meeting for
936 alignment. */
937 prop_value_t tem = *val2;
938 if (TREE_CODE (val1->value) == ADDR_EXPR)
939 *val1 = get_value_for_expr (val1->value, true);
940 if (TREE_CODE (val2->value) == ADDR_EXPR)
941 tem = get_value_for_expr (val2->value, true);
942 ccp_lattice_meet (val1, &tem);
41511585 943 }
944 else
945 {
88dbf20f 946 /* Any other combination is VARYING. */
947 val1->lattice_val = VARYING;
e913b5cd 948 val1->mask = -1;
88dbf20f 949 val1->value = NULL_TREE;
41511585 950 }
4ee9c684 951}
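
/* An example of the INTEGER_CST meet above (illustrative numbers
   only): meeting CONSTANT 4 (0b100, mask 0) with CONSTANT 5 (0b101,
   mask 0) yields mask 0b001, i.e. the partially constant value 0b10x
   instead of an immediate drop to VARYING.  */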
952
953
41511585 954/* Loop through the PHI_NODE's parameters for BLOCK and compare their
955 lattice values to determine PHI_NODE's lattice value. The value of a
88dbf20f 956 PHI node is determined calling ccp_lattice_meet with all the arguments
41511585 957 of the PHI node that are incoming via executable edges. */
4ee9c684 958
41511585 959static enum ssa_prop_result
75a70cf9 960ccp_visit_phi_node (gimple phi)
4ee9c684 961{
75a70cf9 962 unsigned i;
88dbf20f 963 prop_value_t *old_val, new_val;
4ee9c684 964
41511585 965 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 966 {
41511585 967 fprintf (dump_file, "\nVisiting PHI node: ");
75a70cf9 968 print_gimple_stmt (dump_file, phi, 0, dump_flags);
4ee9c684 969 }
4ee9c684 970
75a70cf9 971 old_val = get_value (gimple_phi_result (phi));
41511585 972 switch (old_val->lattice_val)
973 {
974 case VARYING:
88dbf20f 975 return SSA_PROP_VARYING;
4ee9c684 976
41511585 977 case CONSTANT:
978 new_val = *old_val;
979 break;
4ee9c684 980
41511585 981 case UNDEFINED:
41511585 982 new_val.lattice_val = UNDEFINED;
88dbf20f 983 new_val.value = NULL_TREE;
41511585 984 break;
4ee9c684 985
41511585 986 default:
8c0963c4 987 gcc_unreachable ();
41511585 988 }
4ee9c684 989
75a70cf9 990 for (i = 0; i < gimple_phi_num_args (phi); i++)
41511585 991 {
88dbf20f 992 /* Compute the meet operator over all the PHI arguments flowing
993 through executable edges. */
75a70cf9 994 edge e = gimple_phi_arg_edge (phi, i);
4ee9c684 995
41511585 996 if (dump_file && (dump_flags & TDF_DETAILS))
997 {
998 fprintf (dump_file,
999 "\n Argument #%d (%d -> %d %sexecutable)\n",
1000 i, e->src->index, e->dest->index,
1001 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1002 }
1003
 1004 /* If the incoming edge is executable, compute the meet operator for
1005 the existing value of the PHI node and the current PHI argument. */
1006 if (e->flags & EDGE_EXECUTABLE)
1007 {
75a70cf9 1008 tree arg = gimple_phi_arg (phi, i)->def;
b7e55469 1009 prop_value_t arg_val = get_value_for_expr (arg, false);
4ee9c684 1010
88dbf20f 1011 ccp_lattice_meet (&new_val, &arg_val);
4ee9c684 1012
41511585 1013 if (dump_file && (dump_flags & TDF_DETAILS))
1014 {
1015 fprintf (dump_file, "\t");
88dbf20f 1016 print_generic_expr (dump_file, arg, dump_flags);
1017 dump_lattice_value (dump_file, "\tValue: ", arg_val);
41511585 1018 fprintf (dump_file, "\n");
1019 }
4ee9c684 1020
41511585 1021 if (new_val.lattice_val == VARYING)
1022 break;
1023 }
1024 }
4ee9c684 1025
1026 if (dump_file && (dump_flags & TDF_DETAILS))
41511585 1027 {
1028 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1029 fprintf (dump_file, "\n\n");
1030 }
1031
bfa30570 1032 /* Make the transition to the new value. */
75a70cf9 1033 if (set_lattice_value (gimple_phi_result (phi), new_val))
41511585 1034 {
1035 if (new_val.lattice_val == VARYING)
1036 return SSA_PROP_VARYING;
1037 else
1038 return SSA_PROP_INTERESTING;
1039 }
1040 else
1041 return SSA_PROP_NOT_INTERESTING;
4ee9c684 1042}
1043
15d138c9 1044/* Return the constant value for OP or OP otherwise. */
00f4f705 1045
1046static tree
15d138c9 1047valueize_op (tree op)
00f4f705 1048{
00f4f705 1049 if (TREE_CODE (op) == SSA_NAME)
1050 {
15d138c9 1051 tree tem = get_constant_value (op);
1052 if (tem)
1053 return tem;
00f4f705 1054 }
1055 return op;
1056}
1057
41511585 1058/* CCP specific front-end to the non-destructive constant folding
1059 routines.
4ee9c684 1060
1061 Attempt to simplify the RHS of STMT knowing that one or more
1062 operands are constants.
1063
1064 If simplification is possible, return the simplified RHS,
75a70cf9 1065 otherwise return the original RHS or NULL_TREE. */
4ee9c684 1066
1067static tree
75a70cf9 1068ccp_fold (gimple stmt)
4ee9c684 1069{
389dd41b 1070 location_t loc = gimple_location (stmt);
75a70cf9 1071 switch (gimple_code (stmt))
88dbf20f 1072 {
75a70cf9 1073 case GIMPLE_COND:
1074 {
1075 /* Handle comparison operators that can appear in GIMPLE form. */
15d138c9 1076 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1077 tree op1 = valueize_op (gimple_cond_rhs (stmt));
75a70cf9 1078 enum tree_code code = gimple_cond_code (stmt);
389dd41b 1079 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
75a70cf9 1080 }
4ee9c684 1081
75a70cf9 1082 case GIMPLE_SWITCH:
1083 {
15d138c9 1084 /* Return the constant switch index. */
1085 return valueize_op (gimple_switch_index (stmt));
75a70cf9 1086 }
912f109f 1087
1d0b727d 1088 case GIMPLE_ASSIGN:
1089 case GIMPLE_CALL:
1090 return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
04236c3a 1091
8782adcf 1092 default:
1d0b727d 1093 gcc_unreachable ();
8782adcf 1094 }
8782adcf 1095}
75a70cf9 1096
b7e55469 1097/* Apply the operation CODE in type TYPE to the value, mask pair
1098 RVAL and RMASK representing a value of type RTYPE and set
1099 the value, mask pair *VAL and *MASK to the result. */
1100
1101static void
1102bit_value_unop_1 (enum tree_code code, tree type,
e913b5cd 1103 max_wide_int *val, max_wide_int *mask,
1104 tree rtype, const max_wide_int &rval, const max_wide_int &rmask)
b7e55469 1105{
1106 switch (code)
1107 {
1108 case BIT_NOT_EXPR:
1109 *mask = rmask;
cf8f0e63 1110 *val = ~rval;
b7e55469 1111 break;
1112
1113 case NEGATE_EXPR:
1114 {
e913b5cd 1115 max_wide_int temv, temm;
b7e55469 1116 /* Return ~rval + 1. */
1117 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1118 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
e913b5cd 1119 type, temv, temm, type, 1, 0);
b7e55469 1120 break;
1121 }
1122
1123 CASE_CONVERT:
1124 {
e913b5cd 1125 signop sgn;
b7e55469 1126
1127 /* First extend mask and value according to the original type. */
e913b5cd 1128 sgn = TYPE_SIGN (rtype);
1129 *mask = rmask.ext (TYPE_PRECISION (rtype), sgn);
1130 *val = rval.ext (TYPE_PRECISION (rtype), sgn);
b7e55469 1131
1132 /* Then extend mask and value according to the target type. */
e913b5cd 1133 sgn = TYPE_SIGN (type);
1134 *mask = (*mask).ext (TYPE_PRECISION (type), sgn);
1135 *val = (*val).ext (TYPE_PRECISION (type), sgn);
b7e55469 1136 break;
1137 }
1138
1139 default:
e913b5cd 1140 *mask = -1;
b7e55469 1141 break;
1142 }
1143}
1144
1145/* Apply the operation CODE in type TYPE to the value, mask pairs
 1146 R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1147 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1148
1149static void
1150bit_value_binop_1 (enum tree_code code, tree type,
e913b5cd 1151 max_wide_int *val, max_wide_int *mask,
1152 tree r1type, max_wide_int r1val, max_wide_int r1mask,
1153 tree r2type, max_wide_int r2val, max_wide_int r2mask)
b7e55469 1154{
e913b5cd 1155 signop sgn = TYPE_SIGN (type);
1156 int width = TYPE_PRECISION (type);
1157
 1158 /* Assume we'll get a constant result.  Use an initial non-varying
 1159 value; we fall back to varying in the end if necessary. */
1160 *mask = -1;
1161
b7e55469 1162 switch (code)
1163 {
1164 case BIT_AND_EXPR:
1165 /* The mask is constant where there is a known not
1166 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
cf8f0e63 1167 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1168 *val = r1val & r2val;
b7e55469 1169 break;
1170
1171 case BIT_IOR_EXPR:
1172 /* The mask is constant where there is a known
1173 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
cf8f0e63 1174 *mask = (r1mask | r2mask)
1175 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1176 *val = r1val | r2val;
b7e55469 1177 break;
1178
1179 case BIT_XOR_EXPR:
1180 /* m1 | m2 */
cf8f0e63 1181 *mask = r1mask | r2mask;
1182 *val = r1val ^ r2val;
b7e55469 1183 break;
1184
1185 case LROTATE_EXPR:
1186 case RROTATE_EXPR:
e913b5cd 1187 if (r2mask.zero_p ())
b7e55469 1188 {
e913b5cd 1189 wide_int shift = r2val;
1190 if (shift.zero_p ())
1191 {
1192 *mask = r1mask;
1193 *val = r1val;
1194 }
1195 else
1196 {
4d69eb47 1197 if (shift.neg_p ())
e913b5cd 1198 {
1199 shift = -shift;
1200 if (code == RROTATE_EXPR)
1201 code = LROTATE_EXPR;
1202 else
1203 code = RROTATE_EXPR;
1204 }
1205 if (code == RROTATE_EXPR)
1206 {
1207 *mask = r1mask.rrotate (shift, width);
1208 *val = r1val.rrotate (shift, width);
1209 }
1210 else
1211 {
1212 *mask = r1mask.lrotate (shift, width);
1213 *val = r1val.lrotate (shift, width);
1214 }
1215 }
b7e55469 1216 }
1217 break;
1218
1219 case LSHIFT_EXPR:
1220 case RSHIFT_EXPR:
1221 /* ??? We can handle partially known shift counts if we know
 1222 their sign. That way we can tell that (x << (y | 8)) & 255
1223 is zero. */
e913b5cd 1224 if (r2mask.zero_p ())
b7e55469 1225 {
e913b5cd 1226 wide_int shift = r2val;
1227 if (shift.zero_p ())
b7e55469 1228 {
1229 *mask = r1mask;
1230 *val = r1val;
1231 }
e913b5cd 1232 else
1233 {
4d69eb47 1234 if (shift.neg_p ())
e913b5cd 1235 {
1236 shift = -shift;
1237 if (code == RSHIFT_EXPR)
1238 code = LSHIFT_EXPR;
1239 else
1240 code = RSHIFT_EXPR;
1241 }
1242 if (code == RSHIFT_EXPR)
1243 {
1244 *mask = r1mask.ext (width, sgn).rshift (shift, sgn);
1245 *val = r1val.ext (width, sgn).rshift (shift, sgn);
1246 }
1247 else
1248 {
1249 *mask = r1mask.lshift (shift).sext (width);
1250 *val = r1val.lshift (shift).sext (width);
1251 }
1252 }
b7e55469 1253 }
1254 break;
1255
1256 case PLUS_EXPR:
1257 case POINTER_PLUS_EXPR:
1258 {
e913b5cd 1259 max_wide_int lo, hi;
b7e55469 1260 /* Do the addition with unknown bits set to zero, to give carry-ins of
1261 zero wherever possible. */
cf8f0e63 1262 lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
e913b5cd 1263 lo = lo.ext (width, sgn);
b7e55469 1264 /* Do the addition with unknown bits set to one, to give carry-ins of
1265 one wherever possible. */
cf8f0e63 1266 hi = (r1val | r1mask) + (r2val | r2mask);
e913b5cd 1267 hi = hi.ext (width, sgn);
b7e55469 1268 /* Each bit in the result is known if (a) the corresponding bits in
1269 both inputs are known, and (b) the carry-in to that bit position
1270 is known. We can check condition (b) by seeing if we got the same
1271 result with minimised carries as with maximised carries. */
cf8f0e63 1272 *mask = r1mask | r2mask | (lo ^ hi);
e913b5cd 1273 *mask = (*mask).ext (width, sgn);
b7e55469 1274 /* It shouldn't matter whether we choose lo or hi here. */
1275 *val = lo;
1276 break;
1277 }
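
      /* A worked instance of the carry trick above (illustrative
	 numbers only): r1 = 0b10x (val 0b100, mask 0b001) plus
	 r2 = 0b001 gives lo = 0b100 + 0b001 = 0b101 and
	 hi = 0b101 + 0b001 = 0b110, so lo ^ hi = 0b011 and the
	 result is 0b1xx: the unknown carry out of bit 0 also
	 poisons bit 1.  */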
1278
1279 case MINUS_EXPR:
1280 {
e913b5cd 1281 max_wide_int temv, temm;
b7e55469 1282 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1283 r2type, r2val, r2mask);
1284 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1285 r1type, r1val, r1mask,
1286 r2type, temv, temm);
1287 break;
1288 }
1289
1290 case MULT_EXPR:
1291 {
1292 /* Just track trailing zeros in both operands and transfer
1293 them to the other. */
e913b5cd 1294 int r1tz = (r1val | r1mask).ctz ().to_shwi ();
1295 int r2tz = (r2val | r2mask).ctz ().to_shwi ();
1296 if (r1tz + r2tz >= width)
b7e55469 1297 {
e913b5cd 1298 *mask = 0;
1299 *val = 0;
b7e55469 1300 }
1301 else if (r1tz + r2tz > 0)
1302 {
e913b5cd 1303 *mask = max_wide_int::mask (r1tz + r2tz, true).ext (width, sgn);
1304 *val = 0;
b7e55469 1305 }
1306 break;
1307 }
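
      /* E.g. (illustration): a factor with two known trailing zeros
	 (a multiple of 4) times a factor with one known trailing
	 zero (a multiple of 2) gives r1tz + r2tz == 3, so the
	 product is known to be a multiple of 8 and its low three
	 bits are reported as constant zero.  */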
1308
1309 case EQ_EXPR:
1310 case NE_EXPR:
1311 {
e913b5cd 1312 max_wide_int m = r1mask | r2mask;
cf8f0e63 1313 if (r1val.and_not (m) != r2val.and_not (m))
b7e55469 1314 {
e913b5cd 1315 *mask = 0;
1316 *val = ((code == EQ_EXPR) ? 0 : 1);
b7e55469 1317 }
1318 else
1319 {
1320 /* We know the result of a comparison is always one or zero. */
e913b5cd 1321 *mask = 1;
1322 *val = 0;
b7e55469 1323 }
1324 break;
1325 }
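
      /* E.g. (illustration): comparing 0b1xx with 0b0xx gives
	 m == 0b011, and the values still differ in bit 2 outside M,
	 so EQ_EXPR folds to the constant 0 and NE_EXPR to the
	 constant 1.  */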
1326
1327 case GE_EXPR:
1328 case GT_EXPR:
b7e55469 1329 case LT_EXPR:
1330 case LE_EXPR:
1331 {
e913b5cd 1332 max_wide_int o1val, o2val, o1mask, o2mask;
b7e55469 1333 int minmax, maxmin;
e913b5cd 1334
1335 if ((code == GE_EXPR) || (code == GT_EXPR))
1336 {
1337 o1val = r2val;
1338 o1mask = r2mask;
1339 o2val = r1val;
1340 o2mask = r1mask;
1341 code = swap_tree_comparison (code);
1342 }
1343 else
1344 {
1345 o1val = r1val;
1346 o1mask = r1mask;
1347 o2val = r2val;
1348 o2mask = r2mask;
1349 }
b7e55469 1350 /* If the most significant bits are not known we know nothing. */
4d69eb47 1351 if (o1mask.neg_p () || o2mask.neg_p ())
b7e55469 1352 break;
1353
90c0f5b7 1354 /* For comparisons the signedness is in the comparison operands. */
e913b5cd 1355 sgn = TYPE_SIGN (r1type);
90c0f5b7 1356
b7e55469 1357 /* If we know the most significant bits we know the value
 1358 ranges by means of treating varying bits as zero
1359 or one. Do a cross comparison of the max/min pairs. */
e913b5cd 1360 maxmin = (o1val | o1mask).cmp (o2val.and_not (o2mask), sgn);
1361 minmax = o1val.and_not (o1mask).cmp (o2val | o2mask, sgn);
1362 if (maxmin < 0) /* o1 is less than o2. */
b7e55469 1363 {
e913b5cd 1364 *mask = 0;
1365 *val = 1;
b7e55469 1366 }
e913b5cd 1367 else if (minmax > 0) /* o1 is not less or equal to o2. */
b7e55469 1368 {
e913b5cd 1369 *mask = 0;
1370 *val = 0;
b7e55469 1371 }
e913b5cd 1372 else if (maxmin == minmax) /* o1 and o2 are equal. */
b7e55469 1373 {
1374 /* This probably should never happen as we'd have
1375 folded the thing during fully constant value folding. */
e913b5cd 1376 *mask = 0;
1377 *val = (code == LE_EXPR ? 1 : 0);
b7e55469 1378 }
1379 else
1380 {
1381 /* We know the result of a comparison is always one or zero. */
e913b5cd 1382 *mask = 1;
1383 *val = 0;
b7e55469 1384 }
1385 break;
1386 }
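
      /* E.g. (illustration, unsigned operands): for o1 = 0b00x
	 (either 0 or 1) and o2 = 0b100 (exactly 4), MAXMIN compares
	 1 with 4 and is negative, so o1 < o2 is known and LT_EXPR
	 folds to the constant 1 even though o1 is only partially
	 known.  */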
1387
1388 default:;
1389 }
1390}
1391
1392/* Return the propagation value when applying the operation CODE to
1393 the value RHS yielding type TYPE. */
1394
1395static prop_value_t
1396bit_value_unop (enum tree_code code, tree type, tree rhs)
1397{
1398 prop_value_t rval = get_value_for_expr (rhs, true);
e913b5cd 1399 max_wide_int value, mask;
b7e55469 1400 prop_value_t val;
c91fedc5 1401
1402 if (rval.lattice_val == UNDEFINED)
1403 return rval;
1404
b7e55469 1405 gcc_assert ((rval.lattice_val == CONSTANT
1406 && TREE_CODE (rval.value) == INTEGER_CST)
e913b5cd 1407 || rval.mask.minus_one_p ());
b7e55469 1408 bit_value_unop_1 (code, type, &value, &mask,
e913b5cd 1409 TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
1410 if (!mask.minus_one_p ())
b7e55469 1411 {
1412 val.lattice_val = CONSTANT;
1413 val.mask = mask;
1414 /* ??? Delay building trees here. */
e913b5cd 1415 val.value = wide_int_to_tree (type, value);
b7e55469 1416 }
1417 else
1418 {
1419 val.lattice_val = VARYING;
1420 val.value = NULL_TREE;
e913b5cd 1421 val.mask = -1;
b7e55469 1422 }
1423 return val;
1424}
1425
1426/* Return the propagation value when applying the operation CODE to
1427 the values RHS1 and RHS2 yielding type TYPE. */
1428
1429static prop_value_t
1430bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1431{
1432 prop_value_t r1val = get_value_for_expr (rhs1, true);
1433 prop_value_t r2val = get_value_for_expr (rhs2, true);
e913b5cd 1434 max_wide_int value, mask;
b7e55469 1435 prop_value_t val;
c91fedc5 1436
1437 if (r1val.lattice_val == UNDEFINED
1438 || r2val.lattice_val == UNDEFINED)
1439 {
1440 val.lattice_val = VARYING;
1441 val.value = NULL_TREE;
e913b5cd 1442 val.mask = -1;
c91fedc5 1443 return val;
1444 }
1445
b7e55469 1446 gcc_assert ((r1val.lattice_val == CONSTANT
1447 && TREE_CODE (r1val.value) == INTEGER_CST)
e913b5cd 1448 || r1val.mask.minus_one_p ());
b7e55469 1449 gcc_assert ((r2val.lattice_val == CONSTANT
1450 && TREE_CODE (r2val.value) == INTEGER_CST)
e913b5cd 1451 || r2val.mask.minus_one_p ());
b7e55469 1452 bit_value_binop_1 (code, type, &value, &mask,
e913b5cd 1453 TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
1454 TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
1455 if (mask != -1)
b7e55469 1456 {
1457 val.lattice_val = CONSTANT;
1458 val.mask = mask;
1459 /* ??? Delay building trees here. */
e913b5cd 1460 val.value = wide_int_to_tree (type, value);
b7e55469 1461 }
1462 else
1463 {
1464 val.lattice_val = VARYING;
1465 val.value = NULL_TREE;
e913b5cd 1466 val.mask = -1;
b7e55469 1467 }
1468 return val;
1469}
1470
fca0886c 1471/* Return the propagation value when applying __builtin_assume_aligned to
1472 its arguments. */
1473
1474static prop_value_t
1475bit_value_assume_aligned (gimple stmt)
1476{
1477 tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
1478 tree type = TREE_TYPE (ptr);
1479 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1480 prop_value_t ptrval = get_value_for_expr (ptr, true);
1481 prop_value_t alignval;
e913b5cd 1482 max_wide_int value, mask;
fca0886c 1483 prop_value_t val;
e913b5cd 1484
fca0886c 1485 if (ptrval.lattice_val == UNDEFINED)
1486 return ptrval;
1487 gcc_assert ((ptrval.lattice_val == CONSTANT
1488 && TREE_CODE (ptrval.value) == INTEGER_CST)
e913b5cd 1489 || ptrval.mask.minus_one_p ());
fca0886c 1490 align = gimple_call_arg (stmt, 1);
e913b5cd 1491 if (!tree_fits_uhwi_p (align))
fca0886c 1492 return ptrval;
e913b5cd 1493 aligni = tree_to_uhwi (align);
fca0886c 1494 if (aligni <= 1
1495 || (aligni & (aligni - 1)) != 0)
1496 return ptrval;
1497 if (gimple_call_num_args (stmt) > 2)
1498 {
1499 misalign = gimple_call_arg (stmt, 2);
e913b5cd 1500 if (!tree_fits_uhwi_p (misalign))
fca0886c 1501 return ptrval;
e913b5cd 1502 misaligni = tree_to_uhwi (misalign);
fca0886c 1503 if (misaligni >= aligni)
1504 return ptrval;
1505 }
1506 align = build_int_cst_type (type, -aligni);
1507 alignval = get_value_for_expr (align, true);
1508 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
e913b5cd 1509 type, value_to_wide_int (ptrval), ptrval.mask,
1510 type, value_to_wide_int (alignval), alignval.mask);
1511 if (!mask.minus_one_p ())
fca0886c 1512 {
1513 val.lattice_val = CONSTANT;
1514 val.mask = mask;
e913b5cd 1515 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1516 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1517 value |= misaligni;
fca0886c 1518 /* ??? Delay building trees here. */
e913b5cd 1519 val.value = wide_int_to_tree (type, value);
fca0886c 1520 }
1521 else
1522 {
1523 val.lattice_val = VARYING;
1524 val.value = NULL_TREE;
e913b5cd 1525 val.mask = -1;
fca0886c 1526 }
1527 return val;
1528}
1529
75a70cf9 1530/* Evaluate statement STMT.
1531 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1532
88dbf20f 1533static prop_value_t
75a70cf9 1534evaluate_stmt (gimple stmt)
4ee9c684 1535{
88dbf20f 1536 prop_value_t val;
4f61cce6 1537 tree simplified = NULL_TREE;
88dbf20f 1538 ccp_lattice_t likelyvalue = likely_value (stmt);
b7e55469 1539 bool is_constant = false;
581bf1c2 1540 unsigned int align;
88dbf20f 1541
b7e55469 1542 if (dump_file && (dump_flags & TDF_DETAILS))
1543 {
1544 fprintf (dump_file, "which is likely ");
1545 switch (likelyvalue)
1546 {
1547 case CONSTANT:
1548 fprintf (dump_file, "CONSTANT");
1549 break;
1550 case UNDEFINED:
1551 fprintf (dump_file, "UNDEFINED");
1552 break;
1553 case VARYING:
1554 fprintf (dump_file, "VARYING");
1555 break;
1556 default:;
1557 }
1558 fprintf (dump_file, "\n");
1559 }
add6ee5e 1560
4ee9c684 1561 /* If the statement is likely to have a CONSTANT result, then try
1562 to fold the statement to determine the constant value. */
75a70cf9 1563 /* FIXME. This is the only place that we call ccp_fold.
1564 Since likely_value never returns CONSTANT for calls, we will
1565 not attempt to fold them, including builtins that may profit. */
4ee9c684 1566 if (likelyvalue == CONSTANT)
b7e55469 1567 {
1568 fold_defer_overflow_warnings ();
1569 simplified = ccp_fold (stmt);
1570 is_constant = simplified && is_gimple_min_invariant (simplified);
1571 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1572 if (is_constant)
1573 {
1574 /* The statement produced a constant value. */
1575 val.lattice_val = CONSTANT;
1576 val.value = simplified;
e913b5cd 1577 val.mask = 0;
b7e55469 1578 }
1579 }
4ee9c684 1580 /* If the statement is likely to have a VARYING result, then do not
1581 bother folding the statement. */
04236c3a 1582 else if (likelyvalue == VARYING)
75a70cf9 1583 {
590c3166 1584 enum gimple_code code = gimple_code (stmt);
75a70cf9 1585 if (code == GIMPLE_ASSIGN)
1586 {
1587 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 1588
75a70cf9 1589 /* Other cases cannot satisfy is_gimple_min_invariant
1590 without folding. */
1591 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1592 simplified = gimple_assign_rhs1 (stmt);
1593 }
1594 else if (code == GIMPLE_SWITCH)
1595 simplified = gimple_switch_index (stmt);
1596 else
a65c4d64 1597 /* These cannot satisfy is_gimple_min_invariant without folding. */
1598 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
b7e55469 1599 is_constant = simplified && is_gimple_min_invariant (simplified);
1600 if (is_constant)
1601 {
1602 /* The statement produced a constant value. */
1603 val.lattice_val = CONSTANT;
1604 val.value = simplified;
e913b5cd 1605 val.mask = 0;
b7e55469 1606 }
75a70cf9 1607 }
4ee9c684 1608
b7e55469 1609 /* Resort to simplification for bitwise tracking. */
1610 if (flag_tree_bit_ccp
939514e9 1611 && (likelyvalue == CONSTANT || is_gimple_call (stmt))
b7e55469 1612 && !is_constant)
912f109f 1613 {
b7e55469 1614 enum gimple_code code = gimple_code (stmt);
1615 val.lattice_val = VARYING;
1616 val.value = NULL_TREE;
e913b5cd 1617 val.mask = -1;
b7e55469 1618 if (code == GIMPLE_ASSIGN)
912f109f 1619 {
b7e55469 1620 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1621 tree rhs1 = gimple_assign_rhs1 (stmt);
1622 switch (get_gimple_rhs_class (subcode))
1623 {
1624 case GIMPLE_SINGLE_RHS:
1625 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1626 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1627 val = get_value_for_expr (rhs1, true);
1628 break;
1629
1630 case GIMPLE_UNARY_RHS:
1631 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1632 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1633 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1634 || POINTER_TYPE_P (gimple_expr_type (stmt))))
1635 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1636 break;
1637
1638 case GIMPLE_BINARY_RHS:
1639 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1640 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1641 {
e47d81e0 1642 tree lhs = gimple_assign_lhs (stmt);
b7e55469 1643 tree rhs2 = gimple_assign_rhs2 (stmt);
1644 val = bit_value_binop (subcode,
e47d81e0 1645 TREE_TYPE (lhs), rhs1, rhs2);
b7e55469 1646 }
1647 break;
1648
1649 default:;
1650 }
912f109f 1651 }
b7e55469 1652 else if (code == GIMPLE_COND)
1653 {
1654 enum tree_code code = gimple_cond_code (stmt);
1655 tree rhs1 = gimple_cond_lhs (stmt);
1656 tree rhs2 = gimple_cond_rhs (stmt);
1657 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1658 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1659 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1660 }
0b4f0116 1661 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
153c3b50 1662 {
0b4f0116 1663 tree fndecl = gimple_call_fndecl (stmt);
153c3b50 1664 switch (DECL_FUNCTION_CODE (fndecl))
1665 {
1666 case BUILT_IN_MALLOC:
1667 case BUILT_IN_REALLOC:
1668 case BUILT_IN_CALLOC:
939514e9 1669 case BUILT_IN_STRDUP:
1670 case BUILT_IN_STRNDUP:
153c3b50 1671 val.lattice_val = CONSTANT;
1672 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
e913b5cd 1673 val.mask = max_wide_int (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
1674 / BITS_PER_UNIT - 1));
153c3b50 1675 break;
1676
1677 case BUILT_IN_ALLOCA:
581bf1c2 1678 case BUILT_IN_ALLOCA_WITH_ALIGN:
1679 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
e913b5cd 1680 ? tree_to_hwi (gimple_call_arg (stmt, 1))
581bf1c2 1681 : BIGGEST_ALIGNMENT);
153c3b50 1682 val.lattice_val = CONSTANT;
1683 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
e913b5cd 1684 val.mask = max_wide_int (~(((HOST_WIDE_INT) align)
1685 / BITS_PER_UNIT - 1));
153c3b50 1686 break;
1687
939514e9 1688 /* These builtins return their first argument, unmodified. */
1689 case BUILT_IN_MEMCPY:
1690 case BUILT_IN_MEMMOVE:
1691 case BUILT_IN_MEMSET:
1692 case BUILT_IN_STRCPY:
1693 case BUILT_IN_STRNCPY:
1694 case BUILT_IN_MEMCPY_CHK:
1695 case BUILT_IN_MEMMOVE_CHK:
1696 case BUILT_IN_MEMSET_CHK:
1697 case BUILT_IN_STRCPY_CHK:
1698 case BUILT_IN_STRNCPY_CHK:
1699 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1700 break;
1701
fca0886c 1702 case BUILT_IN_ASSUME_ALIGNED:
1703 val = bit_value_assume_aligned (stmt);
1704 break;
1705
153c3b50 1706 default:;
1707 }
1708 }
b7e55469 1709 is_constant = (val.lattice_val == CONSTANT);
912f109f 1710 }
1711
b7e55469 1712 if (!is_constant)
4ee9c684 1713 {
1714 /* The statement produced a nonconstant value. If the statement
88dbf20f 1715 had UNDEFINED operands, then the result of the statement
1716 should be UNDEFINED. Otherwise, the statement is VARYING. */
bfa30570 1717 if (likelyvalue == UNDEFINED)
b7e55469 1718 {
1719 val.lattice_val = likelyvalue;
e913b5cd 1720 val.mask = 0;
b7e55469 1721 }
b765fa12 1722 else
b7e55469 1723 {
1724 val.lattice_val = VARYING;
e913b5cd 1725 val.mask = -1;
b7e55469 1726 }
b765fa12 1727
88dbf20f 1728 val.value = NULL_TREE;
4ee9c684 1729 }
41511585 1730
1731 return val;
4ee9c684 1732}
1733
494bbaae 1734typedef hash_table <pointer_hash <gimple_statement_d> > gimple_htab;
2b15d2ba 1735
582a80ed 1736/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1737 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
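/* An illustrative sketch (SSA names and statements invented): given

     saved_val_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_val_1);

   a statement "var ={v} {CLOBBER};" is inserted immediately before the
   restore; if saved_val_1 first flows through a PHI, the walk recurses
   on the PHI result instead.  */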
1738
1739static void
2b15d2ba 1740insert_clobber_before_stack_restore (tree saved_val, tree var,
1741 gimple_htab *visited)
582a80ed 1742{
1743 gimple stmt, clobber_stmt;
1744 tree clobber;
1745 imm_use_iterator iter;
1746 gimple_stmt_iterator i;
1747 gimple *slot;
1748
1749 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1750 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1751 {
f1f41a6c 1752 clobber = build_constructor (TREE_TYPE (var),
1753 NULL);
582a80ed 1754 TREE_THIS_VOLATILE (clobber) = 1;
1755 clobber_stmt = gimple_build_assign (var, clobber);
1756
1757 i = gsi_for_stmt (stmt);
1758 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1759 }
1760 else if (gimple_code (stmt) == GIMPLE_PHI)
1761 {
2b15d2ba 1762 if (!visited->is_created ())
1763 visited->create (10);
582a80ed 1764
2b15d2ba 1765 slot = visited->find_slot (stmt, INSERT);
582a80ed 1766 if (*slot != NULL)
1767 continue;
1768
1769 *slot = stmt;
1770 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1771 visited);
1772 }
1773 else
1774 gcc_assert (is_gimple_debug (stmt));
1775}
1776
1777/* Advance the iterator to the previous non-debug gimple statement in the same
1778 or dominating basic block. */
1779
1780static inline void
1781gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1782{
1783 basic_block dom;
1784
1785 gsi_prev_nondebug (i);
1786 while (gsi_end_p (*i))
1787 {
1788 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1789 if (dom == NULL || dom == ENTRY_BLOCK_PTR)
1790 return;
1791
1792 *i = gsi_last_bb (dom);
1793 }
1794}
1795
1796/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1543f720 1797 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
1798
1799 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1800 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
1801 that case the function gives up without inserting the clobbers. */
582a80ed 1802
1803static void
1804insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1805{
582a80ed 1806 gimple stmt;
1807 tree saved_val;
2b15d2ba 1808 gimple_htab visited;
582a80ed 1809
1543f720 1810 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
582a80ed 1811 {
1812 stmt = gsi_stmt (i);
1813
1814 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1815 continue;
582a80ed 1816
1817 saved_val = gimple_call_lhs (stmt);
1818 if (saved_val == NULL_TREE)
1819 continue;
1820
1821 insert_clobber_before_stack_restore (saved_val, var, &visited);
1822 break;
1823 }
1824
2b15d2ba 1825 if (visited.is_created ())
1826 visited.dispose ();
582a80ed 1827}
1828
581bf1c2 1829/* Detects a __builtin_alloca_with_align with constant size argument.  If one
1830 is found, declares a fixed-size array in its place and returns the address
1831 of the array, otherwise returns NULL_TREE. */
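/* A sketch of the transformation (identifiers and numbers invented):

     p_1 = __builtin_alloca_with_align (16, 64);

   with a constant size of 16 bytes and an alignment of 64 bits can be
   folded into the address of a fresh local array,

     unsigned char D.1234[16];
     p_1 = &D.1234;

   provided the size heuristic below accepts the allocation.  */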
9a65cc0a 1832
1833static tree
581bf1c2 1834fold_builtin_alloca_with_align (gimple stmt)
9a65cc0a 1835{
1836 unsigned HOST_WIDE_INT size, threshold, n_elem;
1837 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 1838
1839 /* Get lhs. */
1840 lhs = gimple_call_lhs (stmt);
1841 if (lhs == NULL_TREE)
1842 return NULL_TREE;
1843
1844 /* Detect constant argument. */
1845 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 1846 if (arg == NULL_TREE
1847 || TREE_CODE (arg) != INTEGER_CST
e913b5cd 1848 || !tree_fits_uhwi_p (arg))
9a65cc0a 1849 return NULL_TREE;
6e93d308 1850
e913b5cd 1851 size = tree_to_hwi (arg);
9a65cc0a 1852
581bf1c2 1853 /* Heuristic: don't fold large allocas. */
9a65cc0a 1854 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 1855 /* If the alloca is located at function entry, it has the same lifetime
1856 as a declared array, so we allow a larger size. */
9a65cc0a 1857 block = gimple_block (stmt);
1858 if (!(cfun->after_inlining
1859 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1860 threshold /= 10;
1861 if (size > threshold)
1862 return NULL_TREE;
1863
1864 /* Declare array. */
1865 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1866 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 1867 array_type = build_array_type_nelts (elem_type, n_elem);
1868 var = create_tmp_var (array_type, NULL);
e913b5cd 1869 DECL_ALIGN (var) = tree_to_hwi (gimple_call_arg (stmt, 1));
3d4a0a4b 1870 {
1871 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1872 if (pi != NULL && !pi->pt.anything)
1873 {
1874 bool singleton_p;
1875 unsigned uid;
1876 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1877 gcc_assert (singleton_p);
1878 SET_DECL_PT_UID (var, uid);
1879 }
1880 }
9a65cc0a 1881
1882 /* Fold alloca to the address of the array. */
1883 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1884}
1885
6688f8ec 1886/* Fold the stmt at *GSI with CCP specific information that propagating
1887 and regular folding does not catch. */
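/* A hypothetical instance of what this catches: if evaluate_stmt finds
   that the predicate of

     if (x_1 != 0) goto bb_3; else goto bb_4;

   is the constant 0 with an all-zero mask (every bit known), the
   condition is rewritten via gimple_cond_make_false here, even when
   propagating the constant into every use of x_1 would fail for type
   reasons.  */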
1888
1889static bool
1890ccp_fold_stmt (gimple_stmt_iterator *gsi)
1891{
1892 gimple stmt = gsi_stmt (*gsi);
6688f8ec 1893
94144e68 1894 switch (gimple_code (stmt))
1895 {
1896 case GIMPLE_COND:
1897 {
1898 prop_value_t val;
1899 /* Statement evaluation will handle type mismatches in constants
1900 more gracefully than the final propagation. This allows us to
1901 fold more conditionals here. */
1902 val = evaluate_stmt (stmt);
1903 if (val.lattice_val != CONSTANT
e913b5cd 1904 || !val.mask.zero_p ())
94144e68 1905 return false;
1906
b7e55469 1907 if (dump_file)
1908 {
1909 fprintf (dump_file, "Folding predicate ");
1910 print_gimple_expr (dump_file, stmt, 0, 0);
1911 fprintf (dump_file, " to ");
1912 print_generic_expr (dump_file, val.value, 0);
1913 fprintf (dump_file, "\n");
1914 }
1915
94144e68 1916 if (integer_zerop (val.value))
1917 gimple_cond_make_false (stmt);
1918 else
1919 gimple_cond_make_true (stmt);
6688f8ec 1920
94144e68 1921 return true;
1922 }
6688f8ec 1923
94144e68 1924 case GIMPLE_CALL:
1925 {
1926 tree lhs = gimple_call_lhs (stmt);
3064bb7b 1927 int flags = gimple_call_flags (stmt);
15d138c9 1928 tree val;
94144e68 1929 tree argt;
1930 bool changed = false;
1931 unsigned i;
1932
1933 /* If the call was folded into a constant, make sure it goes
1934 away even if we cannot propagate into all uses because of
1935 type issues. */
1936 if (lhs
1937 && TREE_CODE (lhs) == SSA_NAME
3064bb7b 1938 && (val = get_constant_value (lhs))
1939 /* Don't optimize away calls that have side-effects. */
1940 && (flags & (ECF_CONST|ECF_PURE)) != 0
1941 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
94144e68 1942 {
15d138c9 1943 tree new_rhs = unshare_expr (val);
338cce8f 1944 bool res;
94144e68 1945 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1946 TREE_TYPE (new_rhs)))
1947 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 1948 res = update_call_from_tree (gsi, new_rhs);
1949 gcc_assert (res);
94144e68 1950 return true;
1951 }
1952
fb049fba 1953 /* Internal calls provide no argument types, so the extra laxity
1954 for normal calls does not apply. */
1955 if (gimple_call_internal_p (stmt))
1956 return false;
1957
581bf1c2 1958 /* The heuristic of fold_builtin_alloca_with_align differs before and
1959 after inlining, so we don't require the arg to have just changed
1960 into a constant for folding; it only needs to be constant. */
1961 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 1962 {
581bf1c2 1963 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 1964 if (new_rhs)
1965 {
1966 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 1967 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 1968 gcc_assert (res);
582a80ed 1969 insert_clobbers_for_var (*gsi, var);
6e93d308 1970 return true;
1971 }
9a65cc0a 1972 }
1973
94144e68 1974 /* Propagate into the call arguments. Compared to replace_uses_in
1975 this can use the argument slot types for type verification
1976 instead of the current argument type. We also can safely
1977 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 1978 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 1979 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1980 ++i, argt = TREE_CHAIN (argt))
1981 {
1982 tree arg = gimple_call_arg (stmt, i);
1983 if (TREE_CODE (arg) == SSA_NAME
15d138c9 1984 && (val = get_constant_value (arg))
94144e68 1985 && useless_type_conversion_p
1986 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 1987 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 1988 {
15d138c9 1989 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 1990 changed = true;
1991 }
1992 }
e16f4c39 1993
94144e68 1994 return changed;
1995 }
6688f8ec 1996
6872bf3c 1997 case GIMPLE_ASSIGN:
1998 {
1999 tree lhs = gimple_assign_lhs (stmt);
15d138c9 2000 tree val;
6872bf3c 2001
2002 /* If we have a load that turned out to be constant, replace it
2003 as we cannot propagate into all uses in all cases. */
2004 if (gimple_assign_single_p (stmt)
2005 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2006 && (val = get_constant_value (lhs)))
6872bf3c 2007 {
15d138c9 2008 tree rhs = unshare_expr (val);
6872bf3c 2009 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 2010 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 2011 gimple_assign_set_rhs_from_tree (gsi, rhs);
2012 return true;
2013 }
2014
2015 return false;
2016 }
2017
94144e68 2018 default:
2019 return false;
2020 }
6688f8ec 2021}
2022
41511585 2023/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 2024 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2025 creates virtual definitions, set the value of each new name to that
75a70cf9 2026 of the RHS (if we can derive a constant out of the RHS).
2027 Value-returning call statements also perform an assignment, and
2028 are handled here. */
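/* A minimal illustration (statement invented): visiting

     x_1 = 2 + 3;

   evaluates the RHS to the constant 5, sets the lattice value of x_1
   to CONSTANT 5 and returns SSA_PROP_INTERESTING, so statements using
   x_1 are simulated in turn.  */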
4ee9c684 2029
41511585 2030static enum ssa_prop_result
75a70cf9 2031visit_assignment (gimple stmt, tree *output_p)
4ee9c684 2032{
88dbf20f 2033 prop_value_t val;
88dbf20f 2034 enum ssa_prop_result retval;
4ee9c684 2035
75a70cf9 2036 tree lhs = gimple_get_lhs (stmt);
4ee9c684 2037
75a70cf9 2038 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2039 || gimple_call_lhs (stmt) != NULL_TREE);
2040
15d138c9 2041 if (gimple_assign_single_p (stmt)
2042 && gimple_assign_rhs_code (stmt) == SSA_NAME)
2043 /* For a simple copy operation, we copy the lattice values. */
2044 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 2045 else
75a70cf9 2046 /* Evaluate the statement, which could be
2047 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 2048 val = evaluate_stmt (stmt);
4ee9c684 2049
88dbf20f 2050 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2051
41511585 2052 /* Set the lattice value of the statement's output. */
88dbf20f 2053 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2054 {
88dbf20f 2055 /* If STMT is an assignment to an SSA_NAME, we only have one
2056 value to set. */
2057 if (set_lattice_value (lhs, val))
2058 {
2059 *output_p = lhs;
2060 if (val.lattice_val == VARYING)
2061 retval = SSA_PROP_VARYING;
2062 else
2063 retval = SSA_PROP_INTERESTING;
2064 }
4ee9c684 2065 }
88dbf20f 2066
2067 return retval;
4ee9c684 2068}
2069
4ee9c684 2070
41511585 2071/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2072 if it can determine which edge will be taken. Otherwise, return
2073 SSA_PROP_VARYING. */
2074
2075static enum ssa_prop_result
75a70cf9 2076visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2077{
88dbf20f 2078 prop_value_t val;
41511585 2079 basic_block block;
2080
75a70cf9 2081 block = gimple_bb (stmt);
41511585 2082 val = evaluate_stmt (stmt);
b7e55469 2083 if (val.lattice_val != CONSTANT
e913b5cd 2084 || !val.mask.zero_p ())
b7e55469 2085 return SSA_PROP_VARYING;
41511585 2086
2087 /* Find which edge out of the conditional block will be taken and add it
2088 to the worklist. If no single edge can be determined statically,
2089 return SSA_PROP_VARYING to feed all the outgoing edges to the
2090 propagation engine. */
b7e55469 2091 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2092 if (*taken_edge_p)
2093 return SSA_PROP_INTERESTING;
2094 else
2095 return SSA_PROP_VARYING;
4ee9c684 2096}
2097
4ee9c684 2098
41511585 2099/* Evaluate statement STMT. If the statement produces an output value and
2100 its evaluation changes the lattice value of its output, return
2101 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2102 output value.
48e1416a 2103
41511585 2104 If STMT is a conditional branch and we can determine its truth
2105 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2106 value, return SSA_PROP_VARYING. */
4ee9c684 2107
41511585 2108static enum ssa_prop_result
75a70cf9 2109ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2110{
41511585 2111 tree def;
2112 ssa_op_iter iter;
4ee9c684 2113
41511585 2114 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2115 {
88dbf20f 2116 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2117 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2118 }
4ee9c684 2119
75a70cf9 2120 switch (gimple_code (stmt))
4ee9c684 2121 {
75a70cf9 2122 case GIMPLE_ASSIGN:
2123 /* If the statement is an assignment that produces a single
2124 output value, evaluate its RHS to see if the lattice value of
2125 its output has changed. */
2126 return visit_assignment (stmt, output_p);
2127
2128 case GIMPLE_CALL:
2129 /* A value-returning call also performs an assignment. */
2130 if (gimple_call_lhs (stmt) != NULL_TREE)
2131 return visit_assignment (stmt, output_p);
2132 break;
2133
2134 case GIMPLE_COND:
2135 case GIMPLE_SWITCH:
2136 /* If STMT is a conditional branch, see if we can determine
2137 which branch will be taken. */
2138 /* FIXME. It appears that we should be able to optimize
2139 computed GOTOs here as well. */
2140 return visit_cond_stmt (stmt, taken_edge_p);
2141
2142 default:
2143 break;
4ee9c684 2144 }
4ee9c684 2145
41511585 2146 /* Any other kind of statement is not interesting for constant
2147 propagation and, therefore, not worth simulating. */
41511585 2148 if (dump_file && (dump_flags & TDF_DETAILS))
2149 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2150
41511585 2151 /* Definitions made by statements other than assignments to
2152 SSA_NAMEs represent unknown modifications to their outputs.
2153 Mark them VARYING. */
88dbf20f 2154 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2155 {
e913b5cd 2156 prop_value_t v = { VARYING, NULL_TREE, -1 };
88dbf20f 2157 set_lattice_value (def, v);
2158 }
4ee9c684 2159
41511585 2160 return SSA_PROP_VARYING;
2161}
4ee9c684 2162
4ee9c684 2163
88dbf20f 2164/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2165
33a34f1e 2166static unsigned int
61207d43 2167do_ssa_ccp (void)
41511585 2168{
582a80ed 2169 unsigned int todo = 0;
2170 calculate_dominance_info (CDI_DOMINATORS);
41511585 2171 ccp_initialize ();
2172 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2173 if (ccp_finalize ())
560965e9 2174 todo = (TODO_cleanup_cfg | TODO_update_ssa);
582a80ed 2175 free_dominance_info (CDI_DOMINATORS);
2176 return todo;
4ee9c684 2177}
2178
5664499b 2179
2180static bool
41511585 2181gate_ccp (void)
5664499b 2182{
41511585 2183 return flag_tree_ccp != 0;
5664499b 2184}
2185
4ee9c684 2186
cbe8bda8 2187namespace {
2188
2189const pass_data pass_data_ccp =
41511585 2190{
cbe8bda8 2191 GIMPLE_PASS, /* type */
2192 "ccp", /* name */
2193 OPTGROUP_NONE, /* optinfo_flags */
2194 true, /* has_gate */
2195 true, /* has_execute */
2196 TV_TREE_CCP, /* tv_id */
2197 ( PROP_cfg | PROP_ssa ), /* properties_required */
2198 0, /* properties_provided */
2199 0, /* properties_destroyed */
2200 0, /* todo_flags_start */
2201 ( TODO_verify_ssa | TODO_update_address_taken
2202 | TODO_verify_stmts ), /* todo_flags_finish */
41511585 2203};
4ee9c684 2204
cbe8bda8 2205class pass_ccp : public gimple_opt_pass
2206{
2207public:
2208 pass_ccp(gcc::context *ctxt)
2209 : gimple_opt_pass(pass_data_ccp, ctxt)
2210 {}
2211
2212 /* opt_pass methods: */
2213 opt_pass * clone () { return new pass_ccp (ctxt_); }
2214 bool gate () { return gate_ccp (); }
2215 unsigned int execute () { return do_ssa_ccp (); }
2216
2217}; // class pass_ccp
2218
2219} // anon namespace
2220
2221gimple_opt_pass *
2222make_pass_ccp (gcc::context *ctxt)
2223{
2224 return new pass_ccp (ctxt);
2225}
2226
4ee9c684 2227
75a70cf9 2228
bdd0e199 2229/* Try to optimize out __builtin_stack_restore. Optimize it out
2230 if there is another __builtin_stack_restore in the same basic
2231 block and no calls or ASM_EXPRs are in between, or if this block's
2232 only outgoing edge is to EXIT_BLOCK and there are no calls or
2233 ASM_EXPRs after this __builtin_stack_restore. */
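/* An illustrative block (statements invented):

     __builtin_stack_restore (t_1);   <-- candidate, removable
     x_2 = y_3 + 1;                   <-- no call or asm in between
     __builtin_stack_restore (t_4);

   The first restore has no observable effect because the second one
   follows with nothing that could touch the stack in between.  */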
2234
2235static tree
75a70cf9 2236optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2237{
6ea999da 2238 tree callee;
2239 gimple stmt;
75a70cf9 2240
2241 basic_block bb = gsi_bb (i);
2242 gimple call = gsi_stmt (i);
bdd0e199 2243
75a70cf9 2244 if (gimple_code (call) != GIMPLE_CALL
2245 || gimple_call_num_args (call) != 1
2246 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2247 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2248 return NULL_TREE;
2249
75a70cf9 2250 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2251 {
75a70cf9 2252 stmt = gsi_stmt (i);
2253 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2254 return NULL_TREE;
75a70cf9 2255 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2256 continue;
2257
75a70cf9 2258 callee = gimple_call_fndecl (stmt);
c40a6f90 2259 if (!callee
2260 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2261 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2262 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2263 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2264 return NULL_TREE;
2265
2266 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2267 goto second_stack_restore;
bdd0e199 2268 }
2269
6ea999da 2270 if (!gsi_end_p (i))
bdd0e199 2271 return NULL_TREE;
2272
6ea999da 2273 /* Allow one successor, which must be the exit block, or zero successors. */
2274 switch (EDGE_COUNT (bb->succs))
2275 {
2276 case 0:
2277 break;
2278 case 1:
2279 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2280 return NULL_TREE;
2281 break;
2282 default:
2283 return NULL_TREE;
2284 }
2285 second_stack_restore:
bdd0e199 2286
6ea999da 2287 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2288 If there are multiple uses, then the last one should remove the call.
2289 In any case, whether the call to __builtin_stack_save can be removed
2290 or not is irrelevant to removing the call to __builtin_stack_restore. */
2291 if (has_single_use (gimple_call_arg (call, 0)))
2292 {
2293 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2294 if (is_gimple_call (stack_save))
2295 {
2296 callee = gimple_call_fndecl (stack_save);
2297 if (callee
2298 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2299 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2300 {
2301 gimple_stmt_iterator stack_save_gsi;
2302 tree rhs;
bdd0e199 2303
6ea999da 2304 stack_save_gsi = gsi_for_stmt (stack_save);
2305 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2306 update_call_from_tree (&stack_save_gsi, rhs);
2307 }
2308 }
2309 }
bdd0e199 2310
75a70cf9 2311 /* No effect, so the statement will be deleted. */
bdd0e199 2312 return integer_zero_node;
2313}
75a70cf9 2314
8a58ed0a 2315/* If the va_list type is a simple pointer and nothing special is needed,
2316 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2317 optimize __builtin_va_end (&ap) out as a NOP, and optimize
2318 __builtin_va_copy into a simple pointer assignment. */
2319
2320static tree
75a70cf9 2321optimize_stdarg_builtin (gimple call)
8a58ed0a 2322{
5f57a8b1 2323 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2324 bool va_list_simple_ptr;
389dd41b 2325 location_t loc = gimple_location (call);
8a58ed0a 2326
75a70cf9 2327 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2328 return NULL_TREE;
2329
75a70cf9 2330 callee = gimple_call_fndecl (call);
5f57a8b1 2331
2332 cfun_va_list = targetm.fn_abi_va_list (callee);
2333 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2334 && (TREE_TYPE (cfun_va_list) == void_type_node
2335 || TREE_TYPE (cfun_va_list) == char_type_node);
2336
8a58ed0a 2337 switch (DECL_FUNCTION_CODE (callee))
2338 {
2339 case BUILT_IN_VA_START:
2340 if (!va_list_simple_ptr
2341 || targetm.expand_builtin_va_start != NULL
e7ed5dd7 2342 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2343 return NULL_TREE;
2344
75a70cf9 2345 if (gimple_call_num_args (call) != 2)
8a58ed0a 2346 return NULL_TREE;
2347
75a70cf9 2348 lhs = gimple_call_arg (call, 0);
8a58ed0a 2349 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2350 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2351 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2352 return NULL_TREE;
48e1416a 2353
389dd41b 2354 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2355 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2356 1, integer_zero_node);
389dd41b 2357 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2358 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2359
2360 case BUILT_IN_VA_COPY:
2361 if (!va_list_simple_ptr)
2362 return NULL_TREE;
2363
75a70cf9 2364 if (gimple_call_num_args (call) != 2)
8a58ed0a 2365 return NULL_TREE;
2366
75a70cf9 2367 lhs = gimple_call_arg (call, 0);
8a58ed0a 2368 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2369 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2370 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2371 return NULL_TREE;
2372
389dd41b 2373 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2374 rhs = gimple_call_arg (call, 1);
8a58ed0a 2375 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2376 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2377 return NULL_TREE;
2378
389dd41b 2379 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2380 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2381
2382 case BUILT_IN_VA_END:
75a70cf9 2383 /* No effect, so the statement will be deleted. */
8a58ed0a 2384 return integer_zero_node;
2385
2386 default:
2387 gcc_unreachable ();
2388 }
2389}
75a70cf9 2390
f87df69a 2391/* Attempt to make the block of the __builtin_unreachable call at I unreachable by changing
2392 the incoming jumps. Return true if at least one jump was changed. */
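/* A sketch (basic blocks and condition invented): given

     if (x_1 > 10) goto bb_4; else goto bb_5;
     ...
   bb_4:
     __builtin_unreachable ();

   the GIMPLE_COND reaching bb_4 over its true edge is rewritten with
   gimple_cond_make_false, so bb_4 becomes unreachable and a later CFG
   cleanup can remove it.  */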
2393
2394static bool
2395optimize_unreachable (gimple_stmt_iterator i)
2396{
2397 basic_block bb = gsi_bb (i);
2398 gimple_stmt_iterator gsi;
2399 gimple stmt;
2400 edge_iterator ei;
2401 edge e;
2402 bool ret;
2403
2404 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2405 {
2406 stmt = gsi_stmt (gsi);
2407
2408 if (is_gimple_debug (stmt))
2409 continue;
2410
2411 if (gimple_code (stmt) == GIMPLE_LABEL)
2412 {
2413 /* Verify we do not need to preserve the label. */
2414 if (FORCED_LABEL (gimple_label_label (stmt)))
2415 return false;
2416
2417 continue;
2418 }
2419
2420 /* Only handle the case where __builtin_unreachable is the first statement
2421 in the block. We rely on DCE to remove stmts without side-effects
2422 before __builtin_unreachable. */
2423 if (gsi_stmt (gsi) != gsi_stmt (i))
2424 return false;
2425 }
2426
2427 ret = false;
2428 FOR_EACH_EDGE (e, ei, bb->preds)
2429 {
2430 gsi = gsi_last_bb (e->src);
522f73a1 2431 if (gsi_end_p (gsi))
2432 continue;
f87df69a 2433
522f73a1 2434 stmt = gsi_stmt (gsi);
2435 if (gimple_code (stmt) == GIMPLE_COND)
f87df69a 2436 {
2437 if (e->flags & EDGE_TRUE_VALUE)
2438 gimple_cond_make_false (stmt);
2439 else if (e->flags & EDGE_FALSE_VALUE)
2440 gimple_cond_make_true (stmt);
2441 else
2442 gcc_unreachable ();
a03a52b4 2443 update_stmt (stmt);
f87df69a 2444 }
2445 else
2446 {
2447 /* Todo: handle other cases, e.g. a switch statement. */
2448 continue;
2449 }
2450
2451 ret = true;
2452 }
2453
2454 return ret;
2455}
2456
4ee9c684 2457/* A simple pass that attempts to fold all builtin functions. This pass
2458 is run after we've propagated as many constants as we can. */
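/* For instance (hypothetical leftover statement), an unresolved

     _1 = __builtin_constant_p (x_2);

   that the propagators did not fold to 1 is resolved to 0 below, on
   the assumption that whatever is not constant by now never will
   be.  */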
2459
2a1990e9 2460static unsigned int
4ee9c684 2461execute_fold_all_builtins (void)
2462{
b36237eb 2463 bool cfg_changed = false;
4ee9c684 2464 basic_block bb;
b1b7c0c4 2465 unsigned int todoflags = 0;
48e1416a 2466
4ee9c684 2467 FOR_EACH_BB (bb)
2468 {
75a70cf9 2469 gimple_stmt_iterator i;
2470 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2471 {
75a70cf9 2472 gimple stmt, old_stmt;
4ee9c684 2473 tree callee, result;
0a39fd54 2474 enum built_in_function fcode;
4ee9c684 2475
75a70cf9 2476 stmt = gsi_stmt (i);
2477
2478 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2479 {
896a0c42 2480 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts, since
2481 after the last GIMPLE DSE they aren't needed and might
2482 unnecessarily keep the SSA_NAMEs live. */
2483 if (gimple_clobber_p (stmt))
2484 {
2485 tree lhs = gimple_assign_lhs (stmt);
2486 if (TREE_CODE (lhs) == MEM_REF
2487 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2488 {
2489 unlink_stmt_vdef (stmt);
2490 gsi_remove (&i, true);
2491 release_defs (stmt);
2492 continue;
2493 }
2494 }
75a70cf9 2495 gsi_next (&i);
0a39fd54 2496 continue;
2497 }
75a70cf9 2498 callee = gimple_call_fndecl (stmt);
4ee9c684 2499 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2500 {
75a70cf9 2501 gsi_next (&i);
0a39fd54 2502 continue;
2503 }
2504 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 2505
2d18b16d 2506 result = gimple_fold_builtin (stmt);
5a4b7e1e 2507
2508 if (result)
75a70cf9 2509 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 2510
4ee9c684 2511 if (!result)
2512 switch (DECL_FUNCTION_CODE (callee))
2513 {
2514 case BUILT_IN_CONSTANT_P:
2515 /* Resolve __builtin_constant_p. If it hasn't been
2516 folded to integer_one_node by now, it's fairly
2517 certain that the value simply isn't constant. */
75a70cf9 2518 result = integer_zero_node;
4ee9c684 2519 break;
2520
fca0886c 2521 case BUILT_IN_ASSUME_ALIGNED:
2522 /* Remove __builtin_assume_aligned. */
2523 result = gimple_call_arg (stmt, 0);
2524 break;
2525
bdd0e199 2526 case BUILT_IN_STACK_RESTORE:
75a70cf9 2527 result = optimize_stack_restore (i);
8a58ed0a 2528 if (result)
2529 break;
75a70cf9 2530 gsi_next (&i);
8a58ed0a 2531 continue;
2532
f87df69a 2533 case BUILT_IN_UNREACHABLE:
2534 if (optimize_unreachable (i))
2535 cfg_changed = true;
2536 break;
2537
8a58ed0a 2538 case BUILT_IN_VA_START:
2539 case BUILT_IN_VA_END:
2540 case BUILT_IN_VA_COPY:
2541 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 2542 result = optimize_stdarg_builtin (stmt);
bdd0e199 2543 if (result)
2544 break;
2545 /* FALLTHRU */
2546
4ee9c684 2547 default:
75a70cf9 2548 gsi_next (&i);
4ee9c684 2549 continue;
2550 }
2551
f87df69a 2552 if (result == NULL_TREE)
2553 break;
2554
4ee9c684 2555 if (dump_file && (dump_flags & TDF_DETAILS))
2556 {
2557 fprintf (dump_file, "Simplified\n ");
75a70cf9 2558 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2559 }
2560
75a70cf9 2561 old_stmt = stmt;
75a70cf9 2562 if (!update_call_from_tree (&i, result))
0fefde02 2563 {
2564 gimplify_and_update_call_from_tree (&i, result);
2565 todoflags |= TODO_update_address_taken;
2566 }
de6ed584 2567
75a70cf9 2568 stmt = gsi_stmt (i);
4c5fd53c 2569 update_stmt (stmt);
de6ed584 2570
75a70cf9 2571 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2572 && gimple_purge_dead_eh_edges (bb))
b36237eb 2573 cfg_changed = true;
4ee9c684 2574
2575 if (dump_file && (dump_flags & TDF_DETAILS))
2576 {
2577 fprintf (dump_file, "to\n ");
75a70cf9 2578 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2579 fprintf (dump_file, "\n");
2580 }
0a39fd54 2581
2582 /* Retry the same statement if it changed into another
2583 builtin; there might be new opportunities now. */
75a70cf9 2584 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2585 {
75a70cf9 2586 gsi_next (&i);
0a39fd54 2587 continue;
2588 }
75a70cf9 2589 callee = gimple_call_fndecl (stmt);
0a39fd54 2590 if (!callee
75a70cf9 2591 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2592 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2593 gsi_next (&i);
4ee9c684 2594 }
2595 }
48e1416a 2596
b36237eb 2597 /* Delete unreachable blocks. */
b1b7c0c4 2598 if (cfg_changed)
2599 todoflags |= TODO_cleanup_cfg;
48e1416a 2600
b1b7c0c4 2601 return todoflags;
4ee9c684 2602}
2603
41511585 2604
cbe8bda8 2605namespace {
2606
2607const pass_data pass_data_fold_builtins =
4ee9c684 2608{
cbe8bda8 2609 GIMPLE_PASS, /* type */
2610 "fab", /* name */
2611 OPTGROUP_NONE, /* optinfo_flags */
2612 false, /* has_gate */
2613 true, /* has_execute */
2614 TV_NONE, /* tv_id */
2615 ( PROP_cfg | PROP_ssa ), /* properties_required */
2616 0, /* properties_provided */
2617 0, /* properties_destroyed */
2618 0, /* todo_flags_start */
2619 ( TODO_verify_ssa | TODO_update_ssa ), /* todo_flags_finish */
4ee9c684 2620};
cbe8bda8 2621
2622class pass_fold_builtins : public gimple_opt_pass
2623{
2624public:
2625 pass_fold_builtins(gcc::context *ctxt)
2626 : gimple_opt_pass(pass_data_fold_builtins, ctxt)
2627 {}
2628
2629 /* opt_pass methods: */
2630 opt_pass * clone () { return new pass_fold_builtins (ctxt_); }
2631 unsigned int execute () { return execute_fold_all_builtins (); }
2632
2633}; // class pass_fold_builtins
2634
2635} // anon namespace
2636
2637gimple_opt_pass *
2638make_pass_fold_builtins (gcc::context *ctxt)
2639{
2640 return new pass_fold_builtins (ctxt);
2641}