/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED       ->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT        ->  V_i has been found to hold a constant
			    value C.

	VARYING         ->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;

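/* For instance, a pointer value known only to be aligned to 16 bytes is
   represented as lattice_val == CONSTANT with value == 0 and mask == ~15:
   the four trailing bits are known to be zero, while all remaining bits
   are unknown (set in the mask).  */
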
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}

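/* As an example of the monotonicity requirement: UNDEFINED -> CONSTANT
   and CONSTANT -> VARYING are valid transitions (the ccp_lattice_t
   enumerators are declared in increasing order), VARYING -> CONSTANT is
   not.  Within CONSTANT, the INTEGER_CST 6 with mask 0 may transition
   to 6 with mask 2 (bit 1 becomes unknown), but not to 5 with mask 2,
   since a still-known bit would change.  */
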
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constant values here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  align = get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
			  ? double_int_mask (TYPE_PRECISION (type))
			  : double_int_minus_one,
			  uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}

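/* For example, for EXPR == &var with VAR known to be aligned to 8 bytes
   and BITPOS == 0, the result is value == 0 with mask == ~7: the three
   trailing bits of the address are known to be zero.  */
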
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to something that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must make sure it
	     is simulated at least once; otherwise the edges it
	     controls will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj          = Ci         if (i == j)
		Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci           if (i == j)
	 Ci M Cj = VARYING      if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
	 Ci M Cj = VARYING      if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}

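/* For example, meeting the INTEGER_CSTs 4 and 6 (both with mask 0)
   yields value 4 with mask 2: bit 1, where the two values disagree,
   becomes unknown, while bits 0 and 2 stay known.  */
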
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}

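/* For example, BIT_NOT_EXPR applied to value 0 with mask 1 ("0 or 1")
   yields value ~0 with mask 1, i.e. "-1 or -2": bit 0 remains the only
   unknown bit.  */
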
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
	        (double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 their sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      /* ??? We can have sizetype related inconsistencies in
		 the IL.  */
	      if ((TREE_CODE (r1type) == INTEGER_TYPE
		   && (TYPE_IS_SIZETYPE (r1type)
		       ? 0 : TYPE_UNSIGNED (r1type))) != uns)
		break;

	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = (TREE_CODE (r1type) == INTEGER_TYPE
	       && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
	/* ??? We can have sizetype related inconsistencies in the IL.  */
	if ((TREE_CODE (r2type) == INTEGER_TYPE
	     && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
	  break;

	/* If we know the most significant bits, we know the value
	   ranges by treating varying bits as zero or one.  Do a cross
	   comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}

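/* For example, for PLUS_EXPR on r1 = value 1 with mask 2 ("1 or 3") and
   r2 = the constant 4 (mask 0), lo = 1 + 4 = 5 and hi = 3 + 4 = 7, so
   *mask = (2 | 0) | (5 ^ 7) = 2 and *val = 5: the result is "5 or 7"
   with only bit 1 unknown.  */
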
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
	return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_double_int (ptrval), ptrval.mask,
		     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

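/* For example, for p2_1 = __builtin_assume_aligned (p1_2, 16, 4) with
   nothing known about p1_2, the result is value == 4 with mask == ~15:
   an address known to be congruent to 4 modulo 16.  */
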
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
		  	   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
		  	   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt);
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}

1fed1006
TV
1688/* Detects a vla-related alloca with a constant argument. Declares fixed-size
1689 array and return the address, if found, otherwise returns NULL_TREE. */
1690
1691static tree
1692fold_builtin_alloca_for_var (gimple stmt)
1693{
1694 unsigned HOST_WIDE_INT size, threshold, n_elem;
1695 tree lhs, arg, block, var, elem_type, array_type;
1696 unsigned int align;
1697
1698 /* Get lhs. */
1699 lhs = gimple_call_lhs (stmt);
1700 if (lhs == NULL_TREE)
1701 return NULL_TREE;
1702
1703 /* Detect constant argument. */
1704 arg = get_constant_value (gimple_call_arg (stmt, 0));
1705 if (arg == NULL_TREE || TREE_CODE (arg) != INTEGER_CST
1706 || !host_integerp (arg, 1))
1707 return NULL_TREE;
1708 size = TREE_INT_CST_LOW (arg);
1709
1710 /* Heuristic: don't fold large vlas. */
1711 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
1712 /* In case a vla is declared at function scope, it has the same lifetime as a
1713 declared array, so we allow a larger size. */
1714 block = gimple_block (stmt);
1715 if (!(cfun->after_inlining
1716 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1717 threshold /= 10;
1718 if (size > threshold)
1719 return NULL_TREE;
1720
1721 /* Declare array. */
1722 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1723 n_elem = size * 8 / BITS_PER_UNIT;
1724 align = MIN (size * 8, BIGGEST_ALIGNMENT);
1725 array_type = build_array_type_nelts (elem_type, n_elem);
1726 var = create_tmp_var (array_type, NULL);
1727 DECL_ALIGN (var) = align;
1728
1729 /* Fold alloca to the address of the array. */
1730 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1731}
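
/* For example (a sketch; the temporary's name is made up): given a VLA
   "char buf[n]" that was lowered to "p_2 = __builtin_alloca (n_1)",
   once CCP proves n_1 == 32 the call folds to

     char D.1234[32];    /- hypothetical fixed-size temporary -/
     p_2 = &D.1234;

   trading the dynamic stack allocation for an ordinary local array.  */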

/* Fold the stmt at *GSI with CCP specific information that propagation
   and regular folding do not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || !double_int_zero_p (val.mask))
          return false;

        if (dump_file)
          {
            fprintf (dump_file, "Folding predicate ");
            print_gimple_expr (dump_file, stmt, 0, 0);
            fprintf (dump_file, " to ");
            print_generic_expr (dump_file, val.value, 0);
            fprintf (dump_file, "\n");
          }

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }
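      /* A sketch: once x_1 is known to be 7, "if (x_1 > 3)" evaluates
         to the full constant 1 (all mask bits zero), so the condition
         is rewritten to constant true and later CFG cleanup removes
         the dead arm.  */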

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        tree val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree new_rhs = unshare_expr (val);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Internal calls provide no argument types, so the extra laxity
           for normal calls does not apply.  */
        if (gimple_call_internal_p (stmt))
          return false;

        /* The heuristic of fold_builtin_alloca_for_var differs before and
           after inlining, so we don't require the argument to have been
           rewritten into a constant for folding, only that it is known
           to be constant.  */
        if (gimple_call_alloca_for_var_p (stmt))
          {
            tree new_rhs = fold_builtin_alloca_for_var (stmt);
            bool res;
            if (new_rhs == NULL_TREE)
              return false;
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}
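
/* E.g. for the plain copy "x_2 = x_1" the lattice value is copied
   wholesale: if x_1 is already CONSTANT 5, x_2 becomes CONSTANT 5
   without re-evaluating the statement.  */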

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};


/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
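
/* For example (a sketch): in a block containing

     t_1 = __builtin_stack_save ();
     ...                               (no calls or asms in between)
     __builtin_stack_restore (t_1);
     __builtin_stack_restore (t_2);

   the first restore is deleted, and if t_1 has no other uses the
   matching stack_save call is neutralized as well by rewriting it
   into the plain assignment "t_1 = 0".  */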

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
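
/* E.g. on a target whose va_list is a plain character pointer,
   "__builtin_va_copy (&ap2, ap1)" becomes the ordinary assignment
   "ap2 = ap1", and "__builtin_va_end (&ap)" disappears entirely.  */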

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = gimple_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;
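                /* E.g. a surviving "t_1 = __builtin_constant_p (x_2)"
                   whose argument never became constant during
                   propagation is resolved to "t_1 = 0" here.  */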

              case BUILT_IN_ASSUME_ALIGNED:
                /* Remove __builtin_assume_aligned.  */
                result = gimple_call_arg (stmt, 0);
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}


struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};