/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   ->  the initial state of the value.  This value
                            is replaced with a correct initial value
                            the first time the value is used, so the
                            rest of the pass does not need to care about
                            it.  Using this value simplifies initialization
                            of the pass, and prevents us from needlessly
                            scanning statements that are never reached.

        UNDEFINED       ->  V_i is a local variable whose definition
                            has not been processed yet.  Therefore we
                            don't yet know if its value is a constant
                            or not.

        CONSTANT        ->  V_i has been found to hold a constant
                            value C.

        VARYING         ->  V_i cannot take a constant value, or if it
                            does, it is not possible to determine it
                            at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

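/* An illustrative sketch of the pass at work (not part of the original
   sources): given

        x_1 = 4;
        y_2 = x_1 + 2;
        if (y_2 > 10)
          ...

   the simulation records x_1 = CONSTANT 4, folds the addition so that
   y_2 = CONSTANT 6, folds the predicate 6 > 10 to false, and marks the
   true edge as not executable, so the guarded block is never simulated
   and substitute_and_fold can later remove it.  */
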
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;

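/* A worked example of the mask encoding (illustrative, not from the
   original sources): lattice_val = CONSTANT with value = 8 (binary
   1000) and mask = 3 (binary 0011) describes any X whose two low bits
   are unknown but that satisfies X & ~3 == 8, i.e. the set
   {8, 9, 10, 11}.  A mask of zero is a fully known constant; a mask of
   all ones conveys no bit information at all.  */
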
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
          || double_int_zero_p (val.mask))
        {
          fprintf (outf, "%sCONSTANT ", prefix);
          print_generic_expr (outf, val.value, dump_flags);
        }
      else
        {
          double_int cval = double_int_and_not (tree_to_double_int (val.value),
                                                val.mask);
          fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                   prefix, cval.high, cval.low);
          fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
                   val.mask.high, val.mask.low);
        }
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (is_gimple_reg (sym)
          && TREE_CODE (sym) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
             (double_int_and_not (tree_to_double_int (old_val.value),
                                  new_val.mask),
              double_int_and_not (tree_to_double_int (new_val.value),
                                  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
                             tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
                                     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
          && TREE_CODE (new_val.value) == INTEGER_CST
          && (TREE_CODE (old_val->value) != INTEGER_CST
              || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
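
/* Illustrative arithmetic for the mask widening above (not from the
   original sources): if the old value was CONSTANT 12 (binary 1100,
   mask 0) and the new value is CONSTANT 8 (binary 1000, mask 0), then
   diff = 12 ^ 8 = 4 and the merged mask becomes 4; bit 2 is recorded
   as unknown while the other bits stay known, which keeps the
   transition monotone on the bitwise lattice.  */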

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
                               tree, double_int, double_int,
                               tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  align = get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
                          ? double_int_mask (TYPE_PRECISION (type))
                          : double_int_minus_one,
                          uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
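
/* A worked example (illustrative, not from the original sources): for
   the address of a byte at offset 4 within a 16-byte aligned object,
   ALIGN is 128 bits and BITPOS is 32 bits, so the mask clears the low
   four bits (align / BITS_PER_UNIT - 1 == 15) and the value is
   bitpos / BITS_PER_UNIT == 4: the low pointer bits are known to be
   0100 while all higher bits remain unknown.  */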

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, it must be simulated
             at least once; otherwise its outgoing edges would never
             get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].mask = double_int_minus_one;
          const_val[i].value = NULL_TREE;
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
          || !POINTER_TYPE_P (TREE_TYPE (name)))
        continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
          || TREE_CODE (val->value) != INTEGER_CST)
        continue;

      /* Trailing constant bits specify the alignment, trailing value
         bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
        continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
                                           ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci         if (i == j)
                Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask
        = double_int_ior (double_int_ior (val1->mask,
                                          val2->mask),
                          double_int_xor (tree_to_double_int (val1->value),
                                          tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
         alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
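
/* Worked example of the INTEGER_CST meet above (illustrative, not from
   the original sources): meeting CONSTANT 3 (binary 0011, mask 0) with
   CONSTANT 7 (binary 0111, mask 0) yields mask = 0 | 0 | (3 ^ 7) = 4,
   so the result is value 3 with only bit 2 unknown; every bit on which
   the two constants agree survives instead of the whole value dropping
   to VARYING.  */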


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val = get_value_for_expr (arg, false);

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
        return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        /* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
                  double_int *val, double_int *mask,
                  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
        double_int temv, temm;
        /* Return ~rval + 1.  */
        bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           type, temv, temm,
                           type, double_int_one, double_int_zero);
        break;
      }

    CASE_CONVERT:
      {
        bool uns;

        /* First extend mask and value according to the original type.  */
        uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
               ? 0 : TYPE_UNSIGNED (rtype));
        *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
        *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

        /* Then extend mask and value according to the target type.  */
        uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
               ? 0 : TYPE_UNSIGNED (type));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
        break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
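
/* Illustrative example for the unary cases above (not from the
   original sources): BIT_NOT_EXPR inverts the known value bits and
   keeps the mask, so applying it to (value 0101, mask 0010) gives
   (value 1010, mask 0010) with bit 1 still unknown.  NEGATE_EXPR is
   composed as two's complement, -x = ~x + 1, reusing the PLUS_EXPR
   carry tracking in bit_value_binop_1 below to decide which result
   bits remain known.  */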

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
                   double_int *val, double_int *mask,
                   tree r1type, double_int r1val, double_int r1mask,
                   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
              && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
         set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))   */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
                              double_int_and (double_int_ior (r1val, r1mask),
                                              double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
         set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
                (double_int_ior (r1mask, r2mask),
                 double_int_ior (double_int_and_not (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RROTATE_EXPR)
            shift = -shift;
          *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
          *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
        }
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
         its sign.  That way we can tell that (x << (y | 8)) & 255
         is zero.  */
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RSHIFT_EXPR)
            shift = -shift;
          /* We need to know if we are doing a left or a right shift
             to properly shift in zeros for left shift and unsigned
             right shifts and the sign bit for signed right shifts.
             For signed right shifts we shift in varying in case
             the sign bit was varying.  */
          if (shift > 0)
            {
              *mask = double_int_lshift (r1mask, shift,
                                         TYPE_PRECISION (type), false);
              *val = double_int_lshift (r1val, shift,
                                        TYPE_PRECISION (type), false);
            }
          else if (shift < 0)
            {
              /* ??? We can have sizetype related inconsistencies in
                 the IL.  */
              if ((TREE_CODE (r1type) == INTEGER_TYPE
                   && (TYPE_IS_SIZETYPE (r1type)
                       ? 0 : TYPE_UNSIGNED (r1type))) != uns)
                break;

              shift = -shift;
              *mask = double_int_rshift (r1mask, shift,
                                         TYPE_PRECISION (type), !uns);
              *val = double_int_rshift (r1val, shift,
                                        TYPE_PRECISION (type), !uns);
            }
          else
            {
              *mask = r1mask;
              *val = r1val;
            }
        }
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
        double_int lo, hi;
        /* Do the addition with unknown bits set to zero, to give carry-ins of
           zero wherever possible.  */
        lo = double_int_add (double_int_and_not (r1val, r1mask),
                             double_int_and_not (r2val, r2mask));
        lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
        /* Do the addition with unknown bits set to one, to give carry-ins of
           one wherever possible.  */
        hi = double_int_add (double_int_ior (r1val, r1mask),
                             double_int_ior (r2val, r2mask));
        hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
        /* Each bit in the result is known if (a) the corresponding bits in
           both inputs are known, and (b) the carry-in to that bit position
           is known.  We can check condition (b) by seeing if we got the same
           result with minimised carries as with maximised carries.  */
        *mask = double_int_ior (double_int_ior (r1mask, r2mask),
                                double_int_xor (lo, hi));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        /* It shouldn't matter whether we choose lo or hi here.  */
        *val = lo;
        break;
      }

    case MINUS_EXPR:
      {
        double_int temv, temm;
        bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
                          r2type, r2val, r2mask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           r1type, r1val, r1mask,
                           r2type, temv, temm);
        break;
      }

    case MULT_EXPR:
      {
        /* Just track trailing zeros in both operands and transfer
           them to the other.  */
        int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
        int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
        if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (r1tz + r2tz > 0)
          {
            *mask = double_int_not (double_int_mask (r1tz + r2tz));
            *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
            *val = double_int_zero;
          }
        break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
        double_int m = double_int_ior (r1mask, r2mask);
        if (!double_int_equal_p (double_int_and_not (r1val, m),
                                 double_int_and_not (r2val, m)))
          {
            *mask = double_int_zero;
            *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
        double_int tem = r1val;
        r1val = r2val;
        r2val = tem;
        tem = r1mask;
        r1mask = r2mask;
        r2mask = tem;
        code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
        int minmax, maxmin;
        /* If the most significant bits are not known we know nothing.  */
        if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
          break;

        /* For comparisons the signedness is in the comparison operands.  */
        uns = (TREE_CODE (r1type) == INTEGER_TYPE
               && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
        /* ??? We can have sizetype related inconsistencies in the IL.  */
        if ((TREE_CODE (r2type) == INTEGER_TYPE
             && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
          break;

        /* If we know the most significant bits we know the value
           ranges by means of treating varying bits as zero
           or one.  Do a cross comparison of the max/min pairs.  */
        maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask), uns);
        minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
                                 double_int_ior (r2val, r2mask), uns);
        if (maxmin < 0)  /* r1 is less than r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_one;
          }
        else if (minmax > 0)  /* r1 is not less or equal to r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (maxmin == minmax)  /* r1 and r2 are equal.  */
          {
            /* This probably should never happen as we'd have
               folded the thing during fully constant value folding.  */
            *mask = double_int_zero;
            *val = (code == LE_EXPR ? double_int_one : double_int_zero);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }

    default:;
    }
}
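
/* Worked example of the PLUS_EXPR carry trick above (illustrative, not
   from the original sources): adding the constant 0001 to
   (value 0100, mask 0011) gives

     lo = 0100 + 0001 = 0101   (unknown bits taken as zero)
     hi = 0111 + 0001 = 1000   (unknown bits taken as one)

   so mask = 0011 | (0101 ^ 1000) = 1111: the uncertain carry can
   ripple upward, leaving no known bits.  Adding 1000 instead gives
   lo = 1100 and hi = 1111, so mask = 0011 and the two high bits stay
   known.  */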

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
              || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
                    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
               && TREE_CODE (r1val.value) == INTEGER_CST)
              || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
               && TREE_CODE (r2val.value) == INTEGER_CST)
              || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
                     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
                     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
               && TREE_CODE (ptrval.value) == INTEGER_CST)
              || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
        return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
        return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
                     type, value_to_double_int (ptrval), ptrval.mask,
                     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
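
/* Illustrative use (not from the original sources): for
   p2_ = __builtin_assume_aligned (p_, 16, 4) with nothing known about
   p_, the BIT_AND above with -16 leaves mask = ~15 and value = 0, then
   the misalignment is or'ed in, recording p2_ as CONSTANT with value 4
   and mask ~15: the low four bits are known to be 0100 while all
   higher pointer bits remain unknown.  */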

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = double_int_zero;
        }
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = double_int_zero;
        }
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);
          switch (get_gimple_rhs_class (subcode))
            {
            case GIMPLE_SINGLE_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                val = get_value_for_expr (rhs1, true);
              break;

            case GIMPLE_UNARY_RHS:
              if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
                      || POINTER_TYPE_P (gimple_expr_type (stmt))))
                val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
              break;

            case GIMPLE_BINARY_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree rhs2 = gimple_assign_rhs2 (stmt);
                  val = bit_value_binop (subcode,
                                         TREE_TYPE (lhs), rhs1, rhs2);
                }
              break;

            default:;
            }
        }
      else if (code == GIMPLE_COND)
        {
          enum tree_code code = gimple_cond_code (stmt);
          tree rhs1 = gimple_cond_lhs (stmt);
          tree rhs2 = gimple_cond_rhs (stmt);
          if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
              || POINTER_TYPE_P (TREE_TYPE (rhs1)))
            val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
        }
      else if (code == GIMPLE_CALL
               && (fndecl = gimple_call_fndecl (stmt))
               && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_REALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
                           (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
                              / BITS_PER_UNIT - 1));
              break;

            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
                       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
                       : BIGGEST_ALIGNMENT);
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
                           (~(((HOST_WIDE_INT) align)
                              / BITS_PER_UNIT - 1));
              break;

            /* These builtins return their first argument, unmodified.  */
            case BUILT_IN_MEMCPY:
            case BUILT_IN_MEMMOVE:
            case BUILT_IN_MEMSET:
            case BUILT_IN_STRCPY:
            case BUILT_IN_STRNCPY:
            case BUILT_IN_MEMCPY_CHK:
            case BUILT_IN_MEMMOVE_CHK:
            case BUILT_IN_MEMSET_CHK:
            case BUILT_IN_STRCPY_CHK:
            case BUILT_IN_STRNCPY_CHK:
              val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
              break;

            case BUILT_IN_ASSUME_ALIGNED:
              val = bit_value_assume_aligned (stmt);
              break;

            default:;
            }
        }
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        {
          val.lattice_val = likelyvalue;
          val.mask = double_int_zero;
        }
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }

      val.value = NULL_TREE;
    }

  return val;
}
1692
582a80ed 1693/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1694 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1695
1696static void
1697insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
1698{
1699 gimple stmt, clobber_stmt;
1700 tree clobber;
1701 imm_use_iterator iter;
1702 gimple_stmt_iterator i;
1703 gimple *slot;
1704
1705 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1706 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1707 {
1708 clobber = build_constructor (TREE_TYPE (var), NULL);
1709 TREE_THIS_VOLATILE (clobber) = 1;
1710 clobber_stmt = gimple_build_assign (var, clobber);
1711
1712 i = gsi_for_stmt (stmt);
1713 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1714 }
1715 else if (gimple_code (stmt) == GIMPLE_PHI)
1716 {
1717 if (*visited == NULL)
1718 *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);
1719
1720 slot = (gimple *)htab_find_slot (*visited, stmt, INSERT);
1721 if (*slot != NULL)
1722 continue;
1723
1724 *slot = stmt;
1725 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1726 visited);
1727 }
1728 else
1729 gcc_assert (is_gimple_debug (stmt));
1730}
1731
1732/* Advance the iterator to the previous non-debug gimple statement in the same
1733 or dominating basic block. */
1734
1735static inline void
1736gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1737{
1738 basic_block dom;
1739
1740 gsi_prev_nondebug (i);
1741 while (gsi_end_p (*i))
1742 {
1743 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1744 if (dom == NULL || dom == ENTRY_BLOCK_PTR)
1745 return;
1746
1747 *i = gsi_last_bb (dom);
1748 }
1749}
1750
1751/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1752 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE. */
1753
1754static void
1755insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1756{
1757 bool save_found;
1758 gimple stmt;
1759 tree saved_val;
1760 htab_t visited = NULL;
1761
1762 for (save_found = false; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
1763 {
1764 stmt = gsi_stmt (i);
1765
1766 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1767 continue;
1768 save_found = true;
1769
1770 saved_val = gimple_call_lhs (stmt);
1771 if (saved_val == NULL_TREE)
1772 continue;
1773
1774 insert_clobber_before_stack_restore (saved_val, var, &visited);
1775 break;
1776 }
1777
1778 if (visited != NULL)
1779 htab_delete (visited);
1780 gcc_assert (save_found);
1781}
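
/* Illustration of the clobber insertion (a hedged sketch; the SSA name and
   the DECL below are hypothetical).  Given the dominating pair

     saved_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_1);

   a clobber is inserted before the restore,

     D.1234 ={v} {CLOBBER};
     __builtin_stack_restore (saved_1);

   so that the lifetime of the array D.1234 that replaced the alloca ends
   where the alloca'd memory would have been released.  */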
1782
581bf1c2 1783/* Detect a call to __builtin_alloca_with_align with a constant size
 1784   argument.  If one is found, declare a fixed-size array in its place and
 1785   return the address of that array; otherwise return NULL_TREE.  */
9a65cc0a 1786
1787static tree
581bf1c2 1788fold_builtin_alloca_with_align (gimple stmt)
9a65cc0a 1789{
1790 unsigned HOST_WIDE_INT size, threshold, n_elem;
1791 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 1792
1793 /* Get lhs. */
1794 lhs = gimple_call_lhs (stmt);
1795 if (lhs == NULL_TREE)
1796 return NULL_TREE;
1797
1798 /* Detect constant argument. */
1799 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 1800 if (arg == NULL_TREE
1801 || TREE_CODE (arg) != INTEGER_CST
9a65cc0a 1802 || !host_integerp (arg, 1))
1803 return NULL_TREE;
6e93d308 1804
9a65cc0a 1805 size = TREE_INT_CST_LOW (arg);
1806
581bf1c2 1807 /* Heuristic: don't fold large allocas. */
9a65cc0a 1808 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 1809  /* If the alloca call is located at function entry, it has the same
 1810     lifetime as a declared array, so we allow a larger size.  */
9a65cc0a 1811 block = gimple_block (stmt);
1812 if (!(cfun->after_inlining
1813 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1814 threshold /= 10;
1815 if (size > threshold)
1816 return NULL_TREE;
1817
1818 /* Declare array. */
1819 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1820 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 1821 array_type = build_array_type_nelts (elem_type, n_elem);
1822 var = create_tmp_var (array_type, NULL);
581bf1c2 1823 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
3d4a0a4b 1824 {
1825 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1826 if (pi != NULL && !pi->pt.anything)
1827 {
1828 bool singleton_p;
1829 unsigned uid;
1830 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1831 gcc_assert (singleton_p);
1832 SET_DECL_PT_UID (var, uid);
1833 }
1834 }
9a65cc0a 1835
1836 /* Fold alloca to the address of the array. */
1837 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1838}
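
/* Example of this folding (a hedged sketch; the DECL name is made up).
   With a size below the threshold,

     p_1 = __builtin_alloca_with_align (16, 64);

   becomes

     char D.1234[16];          (DECL_ALIGN set to 64 bits)
     p_1 = &D.1234;

   replacing the dynamic stack allocation with an ordinary local array.  */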
1839
6688f8ec 1840/* Fold the stmt at *GSI with CCP-specific information that propagation
 1841   and regular folding do not catch.  */
1842
1843static bool
1844ccp_fold_stmt (gimple_stmt_iterator *gsi)
1845{
1846 gimple stmt = gsi_stmt (*gsi);
6688f8ec 1847
94144e68 1848 switch (gimple_code (stmt))
1849 {
1850 case GIMPLE_COND:
1851 {
1852 prop_value_t val;
1853 /* Statement evaluation will handle type mismatches in constants
1854 more gracefully than the final propagation. This allows us to
1855 fold more conditionals here. */
1856 val = evaluate_stmt (stmt);
1857 if (val.lattice_val != CONSTANT
b7e55469 1858 || !double_int_zero_p (val.mask))
94144e68 1859 return false;
1860
b7e55469 1861 if (dump_file)
1862 {
1863 fprintf (dump_file, "Folding predicate ");
1864 print_gimple_expr (dump_file, stmt, 0, 0);
1865 fprintf (dump_file, " to ");
1866 print_generic_expr (dump_file, val.value, 0);
1867 fprintf (dump_file, "\n");
1868 }
1869
94144e68 1870 if (integer_zerop (val.value))
1871 gimple_cond_make_false (stmt);
1872 else
1873 gimple_cond_make_true (stmt);
6688f8ec 1874
94144e68 1875 return true;
1876 }
6688f8ec 1877
94144e68 1878 case GIMPLE_CALL:
1879 {
1880 tree lhs = gimple_call_lhs (stmt);
15d138c9 1881 tree val;
94144e68 1882 tree argt;
1883 bool changed = false;
1884 unsigned i;
1885
 1886	  /* If the call was folded into a constant, make sure it goes
 1887	     away even if we cannot propagate into all uses because of
 1888	     type issues.  */
1889 if (lhs
1890 && TREE_CODE (lhs) == SSA_NAME
15d138c9 1891 && (val = get_constant_value (lhs)))
94144e68 1892 {
15d138c9 1893 tree new_rhs = unshare_expr (val);
338cce8f 1894 bool res;
94144e68 1895 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1896 TREE_TYPE (new_rhs)))
1897 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 1898 res = update_call_from_tree (gsi, new_rhs);
1899 gcc_assert (res);
94144e68 1900 return true;
1901 }
1902
fb049fba 1903 /* Internal calls provide no argument types, so the extra laxity
1904 for normal calls does not apply. */
1905 if (gimple_call_internal_p (stmt))
1906 return false;
1907
581bf1c2 1908	    /* The heuristic of fold_builtin_alloca_with_align differs before and
 1909	       after inlining, so we don't require the arg to have just changed
 1910	       into a constant for folding; it merely has to be constant.  */
1911 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 1912 {
581bf1c2 1913 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 1914 if (new_rhs)
1915 {
1916 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 1917 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 1918 gcc_assert (res);
582a80ed 1919 insert_clobbers_for_var (*gsi, var);
6e93d308 1920 return true;
1921 }
9a65cc0a 1922 }
1923
94144e68 1924 /* Propagate into the call arguments. Compared to replace_uses_in
1925 this can use the argument slot types for type verification
1926 instead of the current argument type. We also can safely
1927 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 1928 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 1929 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1930 ++i, argt = TREE_CHAIN (argt))
1931 {
1932 tree arg = gimple_call_arg (stmt, i);
1933 if (TREE_CODE (arg) == SSA_NAME
15d138c9 1934 && (val = get_constant_value (arg))
94144e68 1935 && useless_type_conversion_p
1936 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 1937 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 1938 {
15d138c9 1939 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 1940 changed = true;
1941 }
1942 }
e16f4c39 1943
94144e68 1944 return changed;
1945 }
6688f8ec 1946
6872bf3c 1947 case GIMPLE_ASSIGN:
1948 {
1949 tree lhs = gimple_assign_lhs (stmt);
15d138c9 1950 tree val;
6872bf3c 1951
 1952	/* If we have a load that turned out to be constant, replace it,
 1953	   as we cannot propagate into all uses in all cases.  */
1954 if (gimple_assign_single_p (stmt)
1955 && TREE_CODE (lhs) == SSA_NAME
15d138c9 1956 && (val = get_constant_value (lhs)))
6872bf3c 1957 {
15d138c9 1958 tree rhs = unshare_expr (val);
6872bf3c 1959 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 1960 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 1961 gimple_assign_set_rhs_from_tree (gsi, rhs);
1962 return true;
1963 }
1964
1965 return false;
1966 }
1967
94144e68 1968 default:
1969 return false;
1970 }
6688f8ec 1971}
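
/* For instance (a hedged sketch): if evaluate_stmt finds that x_1 is the
   constant 0 with an all-zero mask, the predicate of

     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   folds to false, gimple_cond_make_false rewrites the condition into a
   statically false one, and the CFG cleanup run afterwards deletes the
   dead edge to <bb 3>.  */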
1972
41511585 1973/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 1974 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1975 creates virtual definitions, set the value of each new name to that
75a70cf9 1976 of the RHS (if we can derive a constant out of the RHS).
1977 Value-returning call statements also perform an assignment, and
1978 are handled here. */
4ee9c684 1979
41511585 1980static enum ssa_prop_result
75a70cf9 1981visit_assignment (gimple stmt, tree *output_p)
4ee9c684 1982{
88dbf20f 1983 prop_value_t val;
88dbf20f 1984 enum ssa_prop_result retval;
4ee9c684 1985
75a70cf9 1986 tree lhs = gimple_get_lhs (stmt);
4ee9c684 1987
75a70cf9 1988 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1989 || gimple_call_lhs (stmt) != NULL_TREE);
1990
15d138c9 1991 if (gimple_assign_single_p (stmt)
1992 && gimple_assign_rhs_code (stmt) == SSA_NAME)
1993 /* For a simple copy operation, we copy the lattice values. */
1994 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 1995 else
75a70cf9 1996 /* Evaluate the statement, which could be
1997 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 1998 val = evaluate_stmt (stmt);
4ee9c684 1999
88dbf20f 2000 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2001
41511585 2002 /* Set the lattice value of the statement's output. */
88dbf20f 2003 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2004 {
88dbf20f 2005 /* If STMT is an assignment to an SSA_NAME, we only have one
2006 value to set. */
2007 if (set_lattice_value (lhs, val))
2008 {
2009 *output_p = lhs;
2010 if (val.lattice_val == VARYING)
2011 retval = SSA_PROP_VARYING;
2012 else
2013 retval = SSA_PROP_INTERESTING;
2014 }
4ee9c684 2015 }
88dbf20f 2016
2017 return retval;
4ee9c684 2018}
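
/* Hedged example: for the copy y_2 = x_1 the lattice value of x_1 is
   reused directly, so if x_1 is CONSTANT 5 then y_2 becomes CONSTANT 5;
   when that is a change, SSA_PROP_INTERESTING tells the engine to revisit
   the statements that use y_2.  */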
2019
4ee9c684 2020
41511585 2021/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2022 if it can determine which edge will be taken. Otherwise, return
2023 SSA_PROP_VARYING. */
2024
2025static enum ssa_prop_result
75a70cf9 2026visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2027{
88dbf20f 2028 prop_value_t val;
41511585 2029 basic_block block;
2030
75a70cf9 2031 block = gimple_bb (stmt);
41511585 2032 val = evaluate_stmt (stmt);
b7e55469 2033 if (val.lattice_val != CONSTANT
2034 || !double_int_zero_p (val.mask))
2035 return SSA_PROP_VARYING;
41511585 2036
2037 /* Find which edge out of the conditional block will be taken and add it
2038 to the worklist. If no single edge can be determined statically,
2039 return SSA_PROP_VARYING to feed all the outgoing edges to the
2040 propagation engine. */
b7e55469 2041 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2042 if (*taken_edge_p)
2043 return SSA_PROP_INTERESTING;
2044 else
2045 return SSA_PROP_VARYING;
4ee9c684 2046}
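
/* Hedged example: with i_3 known to be CONSTANT 7 under a zero mask,

     if (i_3 > 4) goto <bb 5>; else goto <bb 6>;

   evaluates to true, find_taken_edge returns the edge to <bb 5>, and only
   that edge is fed back to the propagation engine.  */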
2047
4ee9c684 2048
41511585 2049/* Evaluate statement STMT. If the statement produces an output value and
2050 its evaluation changes the lattice value of its output, return
2051 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2052 output value.
48e1416a 2053
41511585 2054 If STMT is a conditional branch and we can determine its truth
2055 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2056 value, return SSA_PROP_VARYING. */
4ee9c684 2057
41511585 2058static enum ssa_prop_result
75a70cf9 2059ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2060{
41511585 2061 tree def;
2062 ssa_op_iter iter;
4ee9c684 2063
41511585 2064 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2065 {
88dbf20f 2066 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2067 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2068 }
4ee9c684 2069
75a70cf9 2070 switch (gimple_code (stmt))
4ee9c684 2071 {
75a70cf9 2072 case GIMPLE_ASSIGN:
2073 /* If the statement is an assignment that produces a single
2074 output value, evaluate its RHS to see if the lattice value of
2075 its output has changed. */
2076 return visit_assignment (stmt, output_p);
2077
2078 case GIMPLE_CALL:
2079 /* A value-returning call also performs an assignment. */
2080 if (gimple_call_lhs (stmt) != NULL_TREE)
2081 return visit_assignment (stmt, output_p);
2082 break;
2083
2084 case GIMPLE_COND:
2085 case GIMPLE_SWITCH:
2086 /* If STMT is a conditional branch, see if we can determine
2087 which branch will be taken. */
2088 /* FIXME. It appears that we should be able to optimize
2089 computed GOTOs here as well. */
2090 return visit_cond_stmt (stmt, taken_edge_p);
2091
2092 default:
2093 break;
4ee9c684 2094 }
4ee9c684 2095
41511585 2096 /* Any other kind of statement is not interesting for constant
2097 propagation and, therefore, not worth simulating. */
41511585 2098 if (dump_file && (dump_flags & TDF_DETAILS))
2099 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2100
41511585 2101 /* Definitions made by statements other than assignments to
2102 SSA_NAMEs represent unknown modifications to their outputs.
2103 Mark them VARYING. */
88dbf20f 2104 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2105 {
b7e55469 2106 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
88dbf20f 2107 set_lattice_value (def, v);
2108 }
4ee9c684 2109
41511585 2110 return SSA_PROP_VARYING;
2111}
4ee9c684 2112
4ee9c684 2113
88dbf20f 2114/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2115
33a34f1e 2116static unsigned int
61207d43 2117do_ssa_ccp (void)
41511585 2118{
582a80ed 2119 unsigned int todo = 0;
2120 calculate_dominance_info (CDI_DOMINATORS);
41511585 2121 ccp_initialize ();
2122 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2123 if (ccp_finalize ())
582a80ed 2124 todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
2125 free_dominance_info (CDI_DOMINATORS);
2126 return todo;
4ee9c684 2127}
2128
5664499b 2129
2130static bool
41511585 2131gate_ccp (void)
5664499b 2132{
41511585 2133 return flag_tree_ccp != 0;
5664499b 2134}
2135
4ee9c684 2136
48e1416a 2137struct gimple_opt_pass pass_ccp =
41511585 2138{
20099e35 2139 {
2140 GIMPLE_PASS,
41511585 2141 "ccp", /* name */
2142 gate_ccp, /* gate */
88dbf20f 2143 do_ssa_ccp, /* execute */
41511585 2144 NULL, /* sub */
2145 NULL, /* next */
2146 0, /* static_pass_number */
2147 TV_TREE_CCP, /* tv_id */
49290934 2148 PROP_cfg | PROP_ssa, /* properties_required */
41511585 2149 0, /* properties_provided */
b6246c40 2150 0, /* properties_destroyed */
41511585 2151 0, /* todo_flags_start */
771e2890 2152 TODO_verify_ssa
20099e35 2153 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
2154 }
41511585 2155};
4ee9c684 2156
4ee9c684 2157
75a70cf9 2158
bdd0e199 2159/* Try to optimize out __builtin_stack_restore. Optimize it out
2160 if there is another __builtin_stack_restore in the same basic
2161 block and no calls or ASM_EXPRs are in between, or if this block's
2162 only outgoing edge is to EXIT_BLOCK and there are no calls or
2163 ASM_EXPRs after this __builtin_stack_restore. */
2164
2165static tree
75a70cf9 2166optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2167{
6ea999da 2168 tree callee;
2169 gimple stmt;
75a70cf9 2170
2171 basic_block bb = gsi_bb (i);
2172 gimple call = gsi_stmt (i);
bdd0e199 2173
75a70cf9 2174 if (gimple_code (call) != GIMPLE_CALL
2175 || gimple_call_num_args (call) != 1
2176 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2177 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2178 return NULL_TREE;
2179
75a70cf9 2180 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2181 {
75a70cf9 2182 stmt = gsi_stmt (i);
2183 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2184 return NULL_TREE;
75a70cf9 2185 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2186 continue;
2187
75a70cf9 2188 callee = gimple_call_fndecl (stmt);
c40a6f90 2189 if (!callee
2190 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2191 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2192 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2193 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2194 return NULL_TREE;
2195
2196 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2197 goto second_stack_restore;
bdd0e199 2198 }
2199
6ea999da 2200 if (!gsi_end_p (i))
bdd0e199 2201 return NULL_TREE;
2202
6ea999da 2203  /* Allow no successors, or a single successor going to the exit block.  */
2204 switch (EDGE_COUNT (bb->succs))
2205 {
2206 case 0:
2207 break;
2208 case 1:
2209 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2210 return NULL_TREE;
2211 break;
2212 default:
2213 return NULL_TREE;
2214 }
2215 second_stack_restore:
bdd0e199 2216
6ea999da 2217 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2218 If there are multiple uses, then the last one should remove the call.
2219 In any case, whether the call to __builtin_stack_save can be removed
2220 or not is irrelevant to removing the call to __builtin_stack_restore. */
2221 if (has_single_use (gimple_call_arg (call, 0)))
2222 {
2223 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2224 if (is_gimple_call (stack_save))
2225 {
2226 callee = gimple_call_fndecl (stack_save);
2227 if (callee
2228 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2229 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2230 {
2231 gimple_stmt_iterator stack_save_gsi;
2232 tree rhs;
bdd0e199 2233
6ea999da 2234 stack_save_gsi = gsi_for_stmt (stack_save);
2235 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2236 update_call_from_tree (&stack_save_gsi, rhs);
2237 }
2238 }
2239 }
bdd0e199 2240
75a70cf9 2241 /* No effect, so the statement will be deleted. */
bdd0e199 2242 return integer_zero_node;
2243}
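
/* A hedged user-level sketch of the second case above (a restore directly
   before function exit):

     int
     f (int n)
     {
       int r;
       {
	 char a[n];
	 a[0] = 1;
	 r = a[0];
       }
       return r;
     }

   The VLA scope emits a __builtin_stack_save/__builtin_stack_restore pair;
   since the restore is followed only by the return, with no call or asm in
   between, it is redundant (the epilogue releases the stack anyway), and
   the matching save can then be zapped via the single-use check above.  */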
75a70cf9 2244
8a58ed0a 2245/* If the va_list type is a simple pointer and nothing special is needed,
 2246   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 2247   remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
 2248   into a simple pointer assignment.  */
2249
2250static tree
75a70cf9 2251optimize_stdarg_builtin (gimple call)
8a58ed0a 2252{
5f57a8b1 2253 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2254 bool va_list_simple_ptr;
389dd41b 2255 location_t loc = gimple_location (call);
8a58ed0a 2256
75a70cf9 2257 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2258 return NULL_TREE;
2259
75a70cf9 2260 callee = gimple_call_fndecl (call);
5f57a8b1 2261
2262 cfun_va_list = targetm.fn_abi_va_list (callee);
2263 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2264 && (TREE_TYPE (cfun_va_list) == void_type_node
2265 || TREE_TYPE (cfun_va_list) == char_type_node);
2266
8a58ed0a 2267 switch (DECL_FUNCTION_CODE (callee))
2268 {
2269 case BUILT_IN_VA_START:
2270 if (!va_list_simple_ptr
2271 || targetm.expand_builtin_va_start != NULL
b9a16870 2272 || builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2273 return NULL_TREE;
2274
75a70cf9 2275 if (gimple_call_num_args (call) != 2)
8a58ed0a 2276 return NULL_TREE;
2277
75a70cf9 2278 lhs = gimple_call_arg (call, 0);
8a58ed0a 2279 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2280 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2281 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2282 return NULL_TREE;
48e1416a 2283
389dd41b 2284 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2285 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2286 1, integer_zero_node);
389dd41b 2287 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2288 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2289
2290 case BUILT_IN_VA_COPY:
2291 if (!va_list_simple_ptr)
2292 return NULL_TREE;
2293
75a70cf9 2294 if (gimple_call_num_args (call) != 2)
8a58ed0a 2295 return NULL_TREE;
2296
75a70cf9 2297 lhs = gimple_call_arg (call, 0);
8a58ed0a 2298 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2299 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2300 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2301 return NULL_TREE;
2302
389dd41b 2303 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2304 rhs = gimple_call_arg (call, 1);
8a58ed0a 2305 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2306 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2307 return NULL_TREE;
2308
389dd41b 2309 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2310 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2311
2312 case BUILT_IN_VA_END:
75a70cf9 2313 /* No effect, so the statement will be deleted. */
8a58ed0a 2314 return integer_zero_node;
2315
2316 default:
2317 gcc_unreachable ();
2318 }
2319}
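
/* On a target whose va_list is a plain char or void pointer (a hedged
   sketch), the lowered builtin calls in

     void
     f (int last, ...)
     {
       __builtin_va_list ap;
       __builtin_va_start (ap, last);
       ...
       __builtin_va_end (ap);
     }

   simplify to "ap = __builtin_next_arg (0)" for the va_start, while the
   va_end folds to integer_zero_node above, i.e. it is deleted as having
   no effect.  */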
75a70cf9 2320
4ee9c684 2321/* A simple pass that attempts to fold all builtin functions. This pass
2322 is run after we've propagated as many constants as we can. */
2323
2a1990e9 2324static unsigned int
4ee9c684 2325execute_fold_all_builtins (void)
2326{
b36237eb 2327 bool cfg_changed = false;
4ee9c684 2328 basic_block bb;
b1b7c0c4 2329 unsigned int todoflags = 0;
48e1416a 2330
4ee9c684 2331 FOR_EACH_BB (bb)
2332 {
75a70cf9 2333 gimple_stmt_iterator i;
2334 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2335 {
75a70cf9 2336 gimple stmt, old_stmt;
4ee9c684 2337 tree callee, result;
0a39fd54 2338 enum built_in_function fcode;
4ee9c684 2339
75a70cf9 2340 stmt = gsi_stmt (i);
2341
2342 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2343 {
75a70cf9 2344 gsi_next (&i);
0a39fd54 2345 continue;
2346 }
75a70cf9 2347 callee = gimple_call_fndecl (stmt);
4ee9c684 2348 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2349 {
75a70cf9 2350 gsi_next (&i);
0a39fd54 2351 continue;
2352 }
2353 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 2354
2d18b16d 2355 result = gimple_fold_builtin (stmt);
5a4b7e1e 2356
2357 if (result)
75a70cf9 2358 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 2359
4ee9c684 2360 if (!result)
2361 switch (DECL_FUNCTION_CODE (callee))
2362 {
2363 case BUILT_IN_CONSTANT_P:
2364 /* Resolve __builtin_constant_p. If it hasn't been
2365 folded to integer_one_node by now, it's fairly
2366 certain that the value simply isn't constant. */
75a70cf9 2367 result = integer_zero_node;
4ee9c684 2368 break;
2369
fca0886c 2370 case BUILT_IN_ASSUME_ALIGNED:
2371 /* Remove __builtin_assume_aligned. */
2372 result = gimple_call_arg (stmt, 0);
2373 break;
2374
bdd0e199 2375 case BUILT_IN_STACK_RESTORE:
75a70cf9 2376 result = optimize_stack_restore (i);
8a58ed0a 2377 if (result)
2378 break;
75a70cf9 2379 gsi_next (&i);
8a58ed0a 2380 continue;
2381
2382 case BUILT_IN_VA_START:
2383 case BUILT_IN_VA_END:
2384 case BUILT_IN_VA_COPY:
2385 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 2386 result = optimize_stdarg_builtin (stmt);
bdd0e199 2387 if (result)
2388 break;
2389 /* FALLTHRU */
2390
4ee9c684 2391 default:
75a70cf9 2392 gsi_next (&i);
4ee9c684 2393 continue;
2394 }
2395
2396 if (dump_file && (dump_flags & TDF_DETAILS))
2397 {
2398 fprintf (dump_file, "Simplified\n ");
75a70cf9 2399 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2400 }
2401
75a70cf9 2402 old_stmt = stmt;
75a70cf9 2403 if (!update_call_from_tree (&i, result))
0fefde02 2404 {
2405 gimplify_and_update_call_from_tree (&i, result);
2406 todoflags |= TODO_update_address_taken;
2407 }
de6ed584 2408
75a70cf9 2409 stmt = gsi_stmt (i);
4c5fd53c 2410 update_stmt (stmt);
de6ed584 2411
75a70cf9 2412 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2413 && gimple_purge_dead_eh_edges (bb))
b36237eb 2414 cfg_changed = true;
4ee9c684 2415
2416 if (dump_file && (dump_flags & TDF_DETAILS))
2417 {
2418 fprintf (dump_file, "to\n ");
75a70cf9 2419 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2420 fprintf (dump_file, "\n");
2421 }
0a39fd54 2422
 2423	  /* Retry the same statement if it changed into another
 2424	     builtin; there might be new opportunities now.  */
75a70cf9 2425 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2426 {
75a70cf9 2427 gsi_next (&i);
0a39fd54 2428 continue;
2429 }
75a70cf9 2430 callee = gimple_call_fndecl (stmt);
0a39fd54 2431 if (!callee
75a70cf9 2432 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2433 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2434 gsi_next (&i);
4ee9c684 2435 }
2436 }
48e1416a 2437
b36237eb 2438 /* Delete unreachable blocks. */
b1b7c0c4 2439 if (cfg_changed)
2440 todoflags |= TODO_cleanup_cfg;
48e1416a 2441
b1b7c0c4 2442 return todoflags;
4ee9c684 2443}
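
/* Example of a fold only this late pass performs (hedged; the SSA naming
   is hypothetical): a call

     _1 = __builtin_constant_p (n_2);

   that survived constant propagation is resolved to the constant 0 here,
   on the theory that an argument still not known to be constant at this
   point almost certainly never will be.  */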
2444
41511585 2445
48e1416a 2446struct gimple_opt_pass pass_fold_builtins =
4ee9c684 2447{
20099e35 2448 {
2449 GIMPLE_PASS,
4ee9c684 2450 "fab", /* name */
2451 NULL, /* gate */
2452 execute_fold_all_builtins, /* execute */
2453 NULL, /* sub */
2454 NULL, /* next */
2455 0, /* static_pass_number */
0b1615c1 2456 TV_NONE, /* tv_id */
49290934 2457 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 2458 0, /* properties_provided */
2459 0, /* properties_destroyed */
2460 0, /* todo_flags_start */
771e2890 2461 TODO_verify_ssa
20099e35 2462 | TODO_update_ssa /* todo_flags_finish */
2463 }
4ee9c684 2464};