/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED  ->  the initial state of the value.  This value
                        is replaced with a correct initial value
                        the first time the value is used, so the
                        rest of the pass does not need to care about
                        it.  Using this value simplifies initialization
                        of the pass, and prevents us from needlessly
                        scanning statements that are never reached.

     UNDEFINED      ->  V_i is a local variable whose definition
                        has not been processed yet.  Therefore we
                        don't yet know if its value is a constant
                        or not.

     CONSTANT       ->  V_i has been found to hold a constant
                        value C.

     VARYING        ->  V_i cannot take a constant value, or if it
                        does, it is not possible to determine it
                        at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"
#include "hash-table.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;

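/* Illustrative note (not part of the original sources): with value ==
   0b1010 and mask == 0b0011, any runtime value X satisfying
   X & ~mask == 0b1000 matches, i.e. the two low bits are unknown while
   the upper bits are known.  A zero mask means the value is fully
   constant; an all-ones mask carries no information at all.  */
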
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
          || double_int_zero_p (val.mask))
        print_generic_expr (outf, val.value, dump_flags);
      else
        {
          double_int cval = double_int_and_not (tree_to_double_int (val.value),
                                                val.mask);
          fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                   prefix, cval.high, cval.low);
          fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
                   val.mask.high, val.mask.low);
        }
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (!virtual_operand_p (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set the value of y to NaN.  This causes the value of x to be
   set to NaN.  When we later determine that y is in fact VARYING, fold
   uses the fact that HONOR_NANS is false, and we try to change the
   value of x to 0, causing an ICE.  With HONOR_NANS being false, the
   real appearance of NaN would cause undefined behavior, though, so
   claiming that y (and x) are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
             (double_int_and_not (tree_to_double_int (old_val.value),
                                  new_val.mask),
              double_int_and_not (tree_to_double_int (new_val.value),
                                  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
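
/* For example, UNDEFINED -> CONSTANT 4 and CONSTANT 4 -> VARYING are
   valid transitions, but CONSTANT 4 -> CONSTANT 5 is not: a bit that
   was once known may only become unknown; it may never flip to a
   different known value.  */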

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
                             tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
                                     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
          && TREE_CODE (new_val.value) == INTEGER_CST
          && (TREE_CODE (old_val->value) != INTEGER_CST
              || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constant values here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
                               tree, double_int, double_int,
                               tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
                          ? double_int_mask (TYPE_PRECISION (type))
                          : double_int_minus_one,
                          uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
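
/* Worked example (illustrative): for an EXPR known to be 16-byte
   aligned at byte offset 4 within that alignment, align comes back as
   128 bits and bitpos as 32 bits, so the mask clears the low four bits
   (they are known) and the value records the misalignment 4, i.e. the
   pointer's low bits are known to be 0b0100.  */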

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        case ADDR_EXPR:
          /* If any part of an address is UNDEFINED, like the index
             of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- a load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call to
     something that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, we must simulate it
             at least once; otherwise its outgoing edges would never
             get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (virtual_operand_p (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Debug count support.  Reset the values of ssa names to
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].mask = double_int_minus_one;
          const_val[i].value = NULL_TREE;
        }
    }
}


/* Do final substitution of propagated values, clean up the flowgraph
   and free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
          || !POINTER_TYPE_P (TREE_TYPE (name)))
        continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
          || TREE_CODE (val->value) != INTEGER_CST)
        continue;

      /* Trailing constant bits specify the alignment, trailing value
         bits the misalignment.  */
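      /* For instance (illustrative), mask.low == ~0xf with value 4
         says the low four bits are known to be 0b0100: align below
         becomes 0x10 & -0x10 == 16 and the recorded misalignment is
         4 & 0xf == 4.  */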
      tem = val->mask.low;
      align = (tem & -tem);
      if (align > 1)
        set_ptr_info_alignment (get_ptr_info (name), align,
                                TREE_INT_CST_LOW (val->value) & (align - 1));
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
                                           ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci       if (i == j)
     Ci  M Cj        = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask
        = double_int_ior (double_int_ior (val1->mask,
                                          val2->mask),
                          double_int_xor (tree_to_double_int (val1->value),
                                          tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When unequal addresses are involved try meeting for
         alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
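
/* Example of the INTEGER_CST meet (illustrative): meeting the fully
   known constants 4 (0b100) and 6 (0b110) sets mask |= 4 ^ 6 == 0b010,
   so the result stays CONSTANT with value 4 but with bit 1 unknown;
   only if every bit becomes unknown does the result drop to VARYING.  */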


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val = get_value_for_expr (arg, false);

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP, or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
        return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        /* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
                  double_int *val, double_int *mask,
                  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
        double_int temv, temm;
        /* Return ~rval + 1.  */
        bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           type, temv, temm,
                           type, double_int_one, double_int_zero);
        break;
      }

    CASE_CONVERT:
      {
        bool uns;

        /* First extend mask and value according to the original type.  */
        uns = TYPE_UNSIGNED (rtype);
        *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
        *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

        /* Then extend mask and value according to the target type.  */
        uns = TYPE_UNSIGNED (type);
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
        break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
                   double_int *val, double_int *mask,
                   tree r1type, double_int r1val, double_int r1mask,
                   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = TYPE_UNSIGNED (type);
  /* Assume we'll get a constant result.  Use an initial varying value,
     and fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
         set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
                              double_int_and (double_int_ior (r1val, r1mask),
                                              double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
         set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
                (double_int_ior (r1mask, r2mask),
                 double_int_ior (double_int_and_not (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RROTATE_EXPR)
            shift = -shift;
          *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
          *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
        }
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
         their sign.  That way we can tell that (x << (y | 8)) & 255
         is zero.  */
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RSHIFT_EXPR)
            shift = -shift;
          /* We need to know if we are doing a left or a right shift
             to properly shift in zeros for left shift and unsigned
             right shifts and the sign bit for signed right shifts.
             For signed right shifts we shift in varying in case
             the sign bit was varying.  */
          if (shift > 0)
            {
              *mask = double_int_lshift (r1mask, shift,
                                         TYPE_PRECISION (type), false);
              *val = double_int_lshift (r1val, shift,
                                        TYPE_PRECISION (type), false);
            }
          else if (shift < 0)
            {
              shift = -shift;
              *mask = double_int_rshift (r1mask, shift,
                                         TYPE_PRECISION (type), !uns);
              *val = double_int_rshift (r1val, shift,
                                        TYPE_PRECISION (type), !uns);
            }
          else
            {
              *mask = r1mask;
              *val = r1val;
            }
        }
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
        double_int lo, hi;
        /* Do the addition with unknown bits set to zero, to give carry-ins of
           zero wherever possible.  */
        lo = double_int_add (double_int_and_not (r1val, r1mask),
                             double_int_and_not (r2val, r2mask));
        lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
        /* Do the addition with unknown bits set to one, to give carry-ins of
           one wherever possible.  */
        hi = double_int_add (double_int_ior (r1val, r1mask),
                             double_int_ior (r2val, r2mask));
        hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
        /* Each bit in the result is known if (a) the corresponding bits in
           both inputs are known, and (b) the carry-in to that bit position
           is known.  We can check condition (b) by seeing if we got the same
           result with minimised carries as with maximised carries.  */
        *mask = double_int_ior (double_int_ior (r1mask, r2mask),
                                double_int_xor (lo, hi));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        /* It shouldn't matter whether we choose lo or hi here.  */
        *val = lo;
        break;
      }
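      /* Worked example for the addition above (illustrative): r1 ==
         0b0011 fully known and r2 == 0b01?0 with bit 1 unknown give
         lo == 0b0111 and hi == 0b1001, so lo ^ hi == 0b1110 marks the
         carry-affected bits and only bit 0 of the sum stays known.  */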

    case MINUS_EXPR:
      {
        double_int temv, temm;
        bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
                          r2type, r2val, r2mask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           r1type, r1val, r1mask,
                           r2type, temv, temm);
        break;
      }

    case MULT_EXPR:
      {
        /* Just track trailing zeros in both operands and transfer
           them to the other.  */
        int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
        int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
        if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (r1tz + r2tz > 0)
          {
            *mask = double_int_not (double_int_mask (r1tz + r2tz));
            *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
            *val = double_int_zero;
          }
        break;
      }
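      /* E.g., for the MULT_EXPR case above (illustrative), multiplying
         operands with 2 and 1 known trailing zero bits yields at least
         3 trailing zeros: 12 * 10 == 120 == 0b1111000.  Only those low
         bits become known (zero); all higher bits stay unknown.  */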

    case EQ_EXPR:
    case NE_EXPR:
      {
        double_int m = double_int_ior (r1mask, r2mask);
        if (!double_int_equal_p (double_int_and_not (r1val, m),
                                 double_int_and_not (r2val, m)))
          {
            *mask = double_int_zero;
            *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }
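      /* E.g., comparing 0b1?00 (bit 2 unknown) with the constant
         0b0001: the two already differ in a known bit, so EQ_EXPR
         folds to 0 and NE_EXPR to 1 despite the unknown bit.  */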

    case GE_EXPR:
    case GT_EXPR:
      {
        double_int tem = r1val;
        r1val = r2val;
        r2val = tem;
        tem = r1mask;
        r1mask = r2mask;
        r2mask = tem;
        code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
        int minmax, maxmin;
        /* If the most significant bits are not known we know nothing.  */
        if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
          break;

        /* For comparisons the signedness is in the comparison operands.  */
        uns = TYPE_UNSIGNED (r1type);

        /* If we know the most significant bits we know the values'
           value ranges by means of treating varying bits as zero
           or one.  Do a cross comparison of the max/min pairs.  */
        maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask), uns);
        minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
                                 double_int_ior (r2val, r2mask), uns);
        if (maxmin < 0)  /* r1 is less than r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_one;
          }
        else if (minmax > 0)  /* r1 is not less or equal to r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (maxmin == minmax)  /* r1 and r2 are equal.  */
          {
            /* This probably should never happen as we'd have
               folded the thing during fully constant value folding.  */
            *mask = double_int_zero;
            *val = (code == LE_EXPR ? double_int_one : double_int_zero);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
              || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
                    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
               && TREE_CODE (r1val.value) == INTEGER_CST)
              || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
               && TREE_CODE (r2val.value) == INTEGER_CST)
              || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
                     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
                     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
               && TREE_CODE (ptrval.value) == INTEGER_CST)
              || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
        return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
        return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
                     type, value_to_double_int (ptrval), ptrval.mask,
                     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
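
/* For example (illustrative), p2 = __builtin_assume_aligned (p, 16, 4)
   ANDs p's lattice value with -16, making the low four bits known
   zeros, and then ORs in the misalignment 4, so p2's low four bits are
   known to be 0b0100 regardless of what was known about p.  */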

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = double_int_zero;
        }
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = double_int_zero;
        }
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);
          switch (get_gimple_rhs_class (subcode))
            {
            case GIMPLE_SINGLE_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                val = get_value_for_expr (rhs1, true);
              break;

            case GIMPLE_UNARY_RHS:
              if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
                      || POINTER_TYPE_P (gimple_expr_type (stmt))))
                val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
              break;

            case GIMPLE_BINARY_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree rhs2 = gimple_assign_rhs2 (stmt);
                  val = bit_value_binop (subcode,
                                         TREE_TYPE (lhs), rhs1, rhs2);
                }
              break;

            default:;
            }
        }
      else if (code == GIMPLE_COND)
        {
          enum tree_code code = gimple_cond_code (stmt);
          tree rhs1 = gimple_cond_lhs (stmt);
          tree rhs2 = gimple_cond_rhs (stmt);
          if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
              || POINTER_TYPE_P (TREE_TYPE (rhs1)))
            val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
        }
      else if (code == GIMPLE_CALL
               && (fndecl = gimple_call_fndecl (stmt))
               && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_REALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
                           (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
                              / BITS_PER_UNIT - 1));
              break;
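              /* E.g., with a 16-byte MALLOC_ABI_ALIGNMENT (illustrative)
                 the lattice value is 0 with mask ~0xf: the low four bits
                 of the returned pointer are known to be zero, the rest
                 is unknown.  */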

            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
                       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
                       : BIGGEST_ALIGNMENT);
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
                           (~(((HOST_WIDE_INT) align)
                              / BITS_PER_UNIT - 1));
              break;

            /* These builtins return their first argument, unmodified.  */
            case BUILT_IN_MEMCPY:
            case BUILT_IN_MEMMOVE:
            case BUILT_IN_MEMSET:
            case BUILT_IN_STRCPY:
            case BUILT_IN_STRNCPY:
            case BUILT_IN_MEMCPY_CHK:
            case BUILT_IN_MEMMOVE_CHK:
            case BUILT_IN_MEMSET_CHK:
            case BUILT_IN_STRCPY_CHK:
            case BUILT_IN_STRNCPY_CHK:
              val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
              break;

            case BUILT_IN_ASSUME_ALIGNED:
              val = bit_value_assume_aligned (stmt);
              break;

            default:;
            }
        }
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        {
          val.lattice_val = likelyvalue;
          val.mask = double_int_zero;
        }
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }

      val.value = NULL_TREE;
    }

  return val;
}

typedef hash_table <gimple_statement_d, typed_pointer_hash<gimple_statement_d>,
                    typed_pointer_equal<gimple_statement_d>,
                    typed_null_remove<gimple_statement_d> >
  gimple_htab;

/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */
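/* The clobber ends the lifetime of VAR at the stack restore point, which
   allows later passes to reuse its stack slot.  */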

static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
                                     gimple_htab *visited)
{
  gimple stmt, clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
        clobber = build_constructor (TREE_TYPE (var), NULL);
        TREE_THIS_VOLATILE (clobber) = 1;
        clobber_stmt = gimple_build_assign (var, clobber);

        i = gsi_for_stmt (stmt);
        gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
        if (!visited->is_created ())
          visited->create (10);

        slot = visited->find_slot (stmt, INSERT);
        if (*slot != NULL)
          continue;

        *slot = stmt;
        insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
                                             visited);
      }
    else
      gcc_assert (is_gimple_debug (stmt));
}

/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR)
        return;

      *i = gsi_last_bb (dom);
    }
}

/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
   a previous pass (such as DOM) duplicated it along multiple paths to a BB.
   In that case the function gives up without inserting the clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  gimple_htab visited;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
        continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
        continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  if (visited.is_created ())
    visited.dispose ();
}

/* Detects a __builtin_alloca_with_align with a constant size argument.  If
   one is found, declares a fixed-size array in its place and returns the
   address of the array; otherwise returns NULL_TREE.  */

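/* For example, assuming the size passes the heuristic below, a call

     p_1 = __builtin_alloca_with_align (16, 128);

   is replaced by the address of a new 16-byte local array whose
   DECL_ALIGN is 128 bits.  */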
static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !host_integerp (arg, 1))
    return NULL_TREE;

  size = TREE_INT_CST_LOW (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT) PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
        && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type, NULL);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
        bool singleton_p;
        unsigned uid;
        singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
        gcc_assert (singleton_p);
        SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
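        /* Fold only if every bit of the predicate value is known, i.e.
           the mask is zero.  */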
        if (val.lattice_val != CONSTANT
            || !double_int_zero_p (val.mask))
          return false;

        if (dump_file)
          {
            fprintf (dump_file, "Folding predicate ");
            print_gimple_expr (dump_file, stmt, 0, 0);
            fprintf (dump_file, " to ");
            print_generic_expr (dump_file, val.value, 0);
            fprintf (dump_file, "\n");
          }

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        int flags = gimple_call_flags (stmt);
        tree val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs))
            /* Don't optimize away calls that have side-effects.  */
            && (flags & (ECF_CONST|ECF_PURE)) != 0
            && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
          {
            tree new_rhs = unshare_expr (val);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Internal calls provide no argument types, so the extra laxity
           for normal calls does not apply.  */
        if (gimple_call_internal_p (stmt))
          return false;

        /* The heuristic of fold_builtin_alloca_with_align differs before and
           after inlining, so we don't require the argument to have been
           turned into a constant by propagation, merely to be constant.  */
        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
          {
            tree new_rhs = fold_builtin_alloca_with_align (stmt);
            if (new_rhs)
              {
                bool res = update_call_from_tree (gsi, new_rhs);
                tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
                gcc_assert (res);
                insert_clobbers_for_var (*gsi, var);
                return true;
              }
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
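      /* A mask of all ones means every bit of the value is unknown.  */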
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
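  /* Dominance info is needed for inserting clobbers for folded allocas;
     insert_clobbers_for_var walks dominating basic blocks.  */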
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};



/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
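/* Such save/restore pairs typically remain from inlined functions that
   used alloca or variable-length arrays.  */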

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow zero successors, or a single successor that is the exit block.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as a NOP and __builtin_va_copy into a simple
   pointer assignment.  */
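/* E.g. with such a va_list, __builtin_va_copy (d, s) becomes the plain
   pointer copy d = s.  */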

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}

/* Attempt to make the block containing the __builtin_unreachable call at I
   unreachable by changing the incoming jumps.  Return true if at least one
   jump was changed.  */
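/* E.g. when a GIMPLE_COND's true edge leads to the unreachable block, the
   condition is rewritten to constant false, so that edge is never taken.  */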

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          /* Verify we do not need to preserve the label.  */
          if (FORCED_LABEL (gimple_label_label (stmt)))
            return false;

          continue;
        }

      /* Only handle the case that __builtin_unreachable is the first statement
         in the block.  We rely on DCE to remove stmts without side-effects
         before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
        return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
        continue;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          if (e->flags & EDGE_TRUE_VALUE)
            gimple_cond_make_false (stmt);
          else if (e->flags & EDGE_FALSE_VALUE)
            gimple_cond_make_true (stmt);
          else
            gcc_unreachable ();
        }
      else
        {
          /* Todo: handle other cases, f.i. switch statement.  */
          continue;
        }

      ret = true;
    }

  return ret;
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = gimple_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_ASSUME_ALIGNED:
                /* Remove __builtin_assume_aligned.  */
                result = gimple_call_arg (stmt, 0);
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_UNREACHABLE:
                if (optimize_unreachable (i))
                  cfg_changed = true;
                break;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (result == NULL_TREE)
            break;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}


struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};