/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED       ->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT        ->  V_i has been found to hold a constant
			    value C.

	VARYING         ->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

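/* As an illustrative example (not part of the original comment), given

       int x = 4;
       int y;
       if (x > 0)
	 y = x + 1;
       else
	 y = x - 1;

   CCP propagates x_1 = 4 into the predicate, marks the else edge as
   not executable, and the PHI for y then meets only the value 5
   flowing in from the then edge, so every use of y can be replaced
   by the constant 5.  */
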
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
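
/* For example (illustrative, not part of the original sources): a
   CONSTANT lattice entry with value 8 (binary 1000) and mask 3
   (binary 0011) describes a run-time value that agrees with 8 on all
   bits outside the mask, i.e. any of 8, 9, 10 or 11.  A mask of zero
   means every bit is known and the entry is a plain constant; an
   all-ones mask carries no bit information at all.  */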

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
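
/* For instance (an illustrative trace, not from the original sources):
   if an SSA name was CONSTANT 2 with mask 0 and is now set to CONSTANT 3
   with mask 0, the XOR of the two values is 1, so the stored value
   becomes 3 with mask 1: the disagreeing low bit is demoted to unknown
   instead of flip-flopping between 2 and 3, which keeps the iteration
   monotonic.  */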

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_object_alignment_1 (TREE_OPERAND (expr, 0), &align, &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
			  ? double_int_mask (TYPE_PRECISION (type))
			  : double_int_minus_one,
			  uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
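
/* For example (illustrative numbers): if get_object_alignment_1 reports
   a 64-bit (8-byte) alignment and a bit position of 16, the result is
   CONSTANT with value 2 (the byte offset) and a mask clearing the low
   three bits: the low three bits of the address are known to be 010
   while all higher bits stay unknown.  */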

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
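
/* E.g. (illustrative): for "y_2 = x_1 + 1" where x_1 is already
   CONSTANT in the lattice, likely_value returns CONSTANT and the
   statement is handed to the folder; if x_1 is still UNDEFINED, the
   PLUS_EXPR case above applies and UNDEFINED is returned instead.  */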

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to something that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must simulate it
	     at least once.  Failure to do so means that those edges
	     will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING once
   the total number of ssa names analyzed exceeds the specified debug
   count.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align > 1)
	set_ptr_info_alignment (get_ptr_info (name), align,
				TREE_INT_CST_LOW (val->value) & (align - 1));
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
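
/* A short worked example (illustrative): for a pointer whose lattice
   entry has value 0x...4 and mask 0x...f8, the lowest set mask bit is
   8 (tem & -tem), so the pointer is known to be 8-byte aligned with
   misalignment 0x4 & 7 == 4, and that is what gets recorded in the
   SSA name's ptr_info.  */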


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
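
/* Worked example (illustrative): meeting CONSTANT 4 (0100) with
   CONSTANT 6 (0110), both with mask 0, yields mask 0 | 0 | (4 ^ 6) = 2,
   i.e. CONSTANT 4 with the second bit unknown -- exactly the set
   {4, 6}.  Only if no known bits remain (the mask becomes all-ones)
   does the result drop to VARYING.  */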


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}
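
/* In the GIMPLE_COND case above, for example (illustrative), a
   condition "if (i_2 < 4)" with i_2 valueized to 2 reduces to
   fold_binary_loc (LT_EXPR, boolean_type_node, 2, 4), which yields
   boolean_true_node; the propagator can then mark only the true
   outgoing edge as executable.  */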

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = TYPE_UNSIGNED (rtype);
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = TYPE_UNSIGNED (type);
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
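
/* Illustrative trace: for BIT_NOT_EXPR on value 0100 with mask 0010
   (i.e. 4 or 6), the known bits simply invert: the result is value
   ~0100 with the same mask 0010, i.e. ~4 or ~6.  NEGATE_EXPR builds
   on this by adding 1 through the PLUS_EXPR logic in
   bit_value_binop_1.  */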

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = TYPE_UNSIGNED (type);
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle a partially known shift count if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = TYPE_UNSIGNED (r1type);

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
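
/* Worked PLUS_EXPR example (illustrative): r1 = value 4 with mask 1
   (i.e. 4 or 5), r2 = constant 2.  Then lo = 4 + 2 = 6 and
   hi = 5 + 2 = 7, so the result mask is (1 | 0) | (6 ^ 7) = 1 and the
   value is 6: the sum is known to be 6 or 7, with only the low bit
   undetermined.  */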

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
	return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_double_int (ptrval), ptrval.mask,
		     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
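
/* For example (illustrative): p_2 = __builtin_assume_aligned (p_1, 16, 4)
   produces for p_2 a lattice value with mask ~15 and value 4 -- the
   low four bits of the pointer are now known to be 0100 -- even if
   nothing was known about p_1 before.  */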

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) align)
			      / BITS_PER_UNIT - 1));
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt);
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}

/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
{
  gimple stmt, clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var), NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (*visited == NULL)
	  *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);

	slot = (gimple *) htab_find_slot (*visited, stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else
      gcc_assert (is_gimple_debug (stmt));
}
1723
1724/* Advance the iterator to the previous non-debug gimple statement in the same
1725 or dominating basic block. */
1726
1727static inline void
1728gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1729{
1730 basic_block dom;
1731
1732 gsi_prev_nondebug (i);
1733 while (gsi_end_p (*i))
1734 {
1735 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1736 if (dom == NULL || dom == ENTRY_BLOCK_PTR)
1737 return;
1738
1739 *i = gsi_last_bb (dom);
1740 }
1741}
1742
1743/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
1543f720 1744 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
1745
 1746   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1747 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
1748 that case the function gives up without inserting the clobbers. */
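
/* A sketch of the search: starting from I, walk backwards through the
   basic block and then through immediate dominators (using
   gsi_prev_dom_bb_nondebug) until a __builtin_stack_save with an LHS is
   found; its immediate uses then seed insert_clobber_before_stack_restore
   above.  */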
582a80ed 1749
1750static void
1751insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
1752{
582a80ed 1753 gimple stmt;
1754 tree saved_val;
1755 htab_t visited = NULL;
1756
1543f720 1757 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
582a80ed 1758 {
1759 stmt = gsi_stmt (i);
1760
1761 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
1762 continue;
582a80ed 1763
1764 saved_val = gimple_call_lhs (stmt);
1765 if (saved_val == NULL_TREE)
1766 continue;
1767
1768 insert_clobber_before_stack_restore (saved_val, var, &visited);
1769 break;
1770 }
1771
1772 if (visited != NULL)
1773 htab_delete (visited);
582a80ed 1774}
1775
581bf1c2 1776/* Detect a __builtin_alloca_with_align with a constant size argument.  If
 1777   found, declare a fixed-size array in its place and return the array's
 1778   address; otherwise return NULL_TREE.  */
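
/* For example, a sketch (the replacement array is shown with a made-up
   name):

     p_1 = __builtin_alloca_with_align (16, 64);

   becomes the address of a 16-byte local array whose DECL_ALIGN is the
   64 bits requested by the second argument:

     unsigned char tmp[16];
     p_1 = &tmp;  */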
9a65cc0a 1779
1780static tree
581bf1c2 1781fold_builtin_alloca_with_align (gimple stmt)
9a65cc0a 1782{
1783 unsigned HOST_WIDE_INT size, threshold, n_elem;
1784 tree lhs, arg, block, var, elem_type, array_type;
9a65cc0a 1785
1786 /* Get lhs. */
1787 lhs = gimple_call_lhs (stmt);
1788 if (lhs == NULL_TREE)
1789 return NULL_TREE;
1790
1791 /* Detect constant argument. */
1792 arg = get_constant_value (gimple_call_arg (stmt, 0));
6e93d308 1793 if (arg == NULL_TREE
1794 || TREE_CODE (arg) != INTEGER_CST
9a65cc0a 1795 || !host_integerp (arg, 1))
1796 return NULL_TREE;
6e93d308 1797
9a65cc0a 1798 size = TREE_INT_CST_LOW (arg);
1799
581bf1c2 1800 /* Heuristic: don't fold large allocas. */
9a65cc0a 1801 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
581bf1c2 1802  /* If the alloca is located at function entry, it has the same lifetime
1803 as a declared array, so we allow a larger size. */
9a65cc0a 1804 block = gimple_block (stmt);
1805 if (!(cfun->after_inlining
1806 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
1807 threshold /= 10;
1808 if (size > threshold)
1809 return NULL_TREE;
1810
1811 /* Declare array. */
1812 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
1813 n_elem = size * 8 / BITS_PER_UNIT;
9a65cc0a 1814 array_type = build_array_type_nelts (elem_type, n_elem);
1815 var = create_tmp_var (array_type, NULL);
581bf1c2 1816 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
3d4a0a4b 1817 {
1818 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
1819 if (pi != NULL && !pi->pt.anything)
1820 {
1821 bool singleton_p;
1822 unsigned uid;
1823 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
1824 gcc_assert (singleton_p);
1825 SET_DECL_PT_UID (var, uid);
1826 }
1827 }
9a65cc0a 1828
1829 /* Fold alloca to the address of the array. */
1830 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
1831}
1832
6688f8ec 1833/* Fold the stmt at *GSI with CCP-specific information that propagation
 1834   and regular folding do not catch.  */
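
/* For example, when evaluate_stmt shows the predicate of a GIMPLE_COND to
   be the constant 0 with no unknown bits, the condition is rewritten via
   gimple_cond_make_false, and the dump reads (a sketch):

     Folding predicate x_1 != 0 to 0  */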
1835
1836static bool
1837ccp_fold_stmt (gimple_stmt_iterator *gsi)
1838{
1839 gimple stmt = gsi_stmt (*gsi);
6688f8ec 1840
94144e68 1841 switch (gimple_code (stmt))
1842 {
1843 case GIMPLE_COND:
1844 {
1845 prop_value_t val;
1846 /* Statement evaluation will handle type mismatches in constants
1847 more gracefully than the final propagation. This allows us to
1848 fold more conditionals here. */
1849 val = evaluate_stmt (stmt);
1850 if (val.lattice_val != CONSTANT
b7e55469 1851 || !double_int_zero_p (val.mask))
94144e68 1852 return false;
1853
b7e55469 1854 if (dump_file)
1855 {
1856 fprintf (dump_file, "Folding predicate ");
1857 print_gimple_expr (dump_file, stmt, 0, 0);
1858 fprintf (dump_file, " to ");
1859 print_generic_expr (dump_file, val.value, 0);
1860 fprintf (dump_file, "\n");
1861 }
1862
94144e68 1863 if (integer_zerop (val.value))
1864 gimple_cond_make_false (stmt);
1865 else
1866 gimple_cond_make_true (stmt);
6688f8ec 1867
94144e68 1868 return true;
1869 }
6688f8ec 1870
94144e68 1871 case GIMPLE_CALL:
1872 {
1873 tree lhs = gimple_call_lhs (stmt);
3064bb7b 1874 int flags = gimple_call_flags (stmt);
15d138c9 1875 tree val;
94144e68 1876 tree argt;
1877 bool changed = false;
1878 unsigned i;
1879
 1880	/* If the call was folded into a constant, make sure it goes
1881 away even if we cannot propagate into all uses because of
1882 type issues. */
1883 if (lhs
1884 && TREE_CODE (lhs) == SSA_NAME
3064bb7b 1885 && (val = get_constant_value (lhs))
1886 /* Don't optimize away calls that have side-effects. */
1887 && (flags & (ECF_CONST|ECF_PURE)) != 0
1888 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
94144e68 1889 {
15d138c9 1890 tree new_rhs = unshare_expr (val);
338cce8f 1891 bool res;
94144e68 1892 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1893 TREE_TYPE (new_rhs)))
1894 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 1895 res = update_call_from_tree (gsi, new_rhs);
1896 gcc_assert (res);
94144e68 1897 return true;
1898 }
1899
fb049fba 1900 /* Internal calls provide no argument types, so the extra laxity
1901 for normal calls does not apply. */
1902 if (gimple_call_internal_p (stmt))
1903 return false;
1904
581bf1c2 1905	/* The heuristic of fold_builtin_alloca_with_align differs before and
 1906	   after inlining, so we don't require the argument to be replaced by
 1907	   a constant here, only that its value is known to be constant.  */
1908 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
9a65cc0a 1909 {
581bf1c2 1910 tree new_rhs = fold_builtin_alloca_with_align (stmt);
6e93d308 1911 if (new_rhs)
1912 {
1913 bool res = update_call_from_tree (gsi, new_rhs);
582a80ed 1914 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
6e93d308 1915 gcc_assert (res);
582a80ed 1916 insert_clobbers_for_var (*gsi, var);
6e93d308 1917 return true;
1918 }
9a65cc0a 1919 }
1920
94144e68 1921 /* Propagate into the call arguments. Compared to replace_uses_in
1922 this can use the argument slot types for type verification
 1923	   instead of the current argument type.  We can also safely
1924 drop qualifiers here as we are dealing with constants anyway. */
2de00a2d 1925 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
94144e68 1926 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1927 ++i, argt = TREE_CHAIN (argt))
1928 {
1929 tree arg = gimple_call_arg (stmt, i);
1930 if (TREE_CODE (arg) == SSA_NAME
15d138c9 1931 && (val = get_constant_value (arg))
94144e68 1932 && useless_type_conversion_p
1933 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 1934 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 1935 {
15d138c9 1936 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 1937 changed = true;
1938 }
1939 }
e16f4c39 1940
94144e68 1941 return changed;
1942 }
6688f8ec 1943
6872bf3c 1944 case GIMPLE_ASSIGN:
1945 {
1946 tree lhs = gimple_assign_lhs (stmt);
15d138c9 1947 tree val;
6872bf3c 1948
1949 /* If we have a load that turned out to be constant replace it
1950 as we cannot propagate into all uses in all cases. */
1951 if (gimple_assign_single_p (stmt)
1952 && TREE_CODE (lhs) == SSA_NAME
15d138c9 1953 && (val = get_constant_value (lhs)))
6872bf3c 1954 {
15d138c9 1955 tree rhs = unshare_expr (val);
6872bf3c 1956 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 1957 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 1958 gimple_assign_set_rhs_from_tree (gsi, rhs);
1959 return true;
1960 }
1961
1962 return false;
1963 }
1964
94144e68 1965 default:
1966 return false;
1967 }
6688f8ec 1968}
1969
41511585 1970/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 1971 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1972 creates virtual definitions, set the value of each new name to that
75a70cf9 1973 of the RHS (if we can derive a constant out of the RHS).
1974 Value-returning call statements also perform an assignment, and
1975 are handled here. */
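
/* For example (a sketch): if y_1 is known to be CONSTANT 6, then for
   'x_2 = y_1 + 1' the RHS evaluates to CONSTANT 7, so x_2's lattice
   value is set to CONSTANT 7, *OUTPUT_P is set to x_2, and
   SSA_PROP_INTERESTING is returned on the first visit.  */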
4ee9c684 1976
41511585 1977static enum ssa_prop_result
75a70cf9 1978visit_assignment (gimple stmt, tree *output_p)
4ee9c684 1979{
88dbf20f 1980 prop_value_t val;
88dbf20f 1981 enum ssa_prop_result retval;
4ee9c684 1982
75a70cf9 1983 tree lhs = gimple_get_lhs (stmt);
4ee9c684 1984
75a70cf9 1985 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1986 || gimple_call_lhs (stmt) != NULL_TREE);
1987
15d138c9 1988 if (gimple_assign_single_p (stmt)
1989 && gimple_assign_rhs_code (stmt) == SSA_NAME)
1990 /* For a simple copy operation, we copy the lattice values. */
1991 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 1992 else
75a70cf9 1993 /* Evaluate the statement, which could be
1994 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 1995 val = evaluate_stmt (stmt);
4ee9c684 1996
88dbf20f 1997 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 1998
41511585 1999 /* Set the lattice value of the statement's output. */
88dbf20f 2000 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2001 {
88dbf20f 2002 /* If STMT is an assignment to an SSA_NAME, we only have one
2003 value to set. */
2004 if (set_lattice_value (lhs, val))
2005 {
2006 *output_p = lhs;
2007 if (val.lattice_val == VARYING)
2008 retval = SSA_PROP_VARYING;
2009 else
2010 retval = SSA_PROP_INTERESTING;
2011 }
4ee9c684 2012 }
88dbf20f 2013
2014 return retval;
4ee9c684 2015}
2016
4ee9c684 2017
41511585 2018/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2019 if it can determine which edge will be taken. Otherwise, return
2020 SSA_PROP_VARYING. */
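
/* For example (a sketch): if the predicate of 'if (x_1 > 10)' evaluates
   to the constant 1 with no unknown mask bits, find_taken_edge returns
   the true edge and only that edge is fed to the propagation engine.  */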
2021
2022static enum ssa_prop_result
75a70cf9 2023visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2024{
88dbf20f 2025 prop_value_t val;
41511585 2026 basic_block block;
2027
75a70cf9 2028 block = gimple_bb (stmt);
41511585 2029 val = evaluate_stmt (stmt);
b7e55469 2030 if (val.lattice_val != CONSTANT
2031 || !double_int_zero_p (val.mask))
2032 return SSA_PROP_VARYING;
41511585 2033
2034 /* Find which edge out of the conditional block will be taken and add it
2035 to the worklist. If no single edge can be determined statically,
2036 return SSA_PROP_VARYING to feed all the outgoing edges to the
2037 propagation engine. */
b7e55469 2038 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2039 if (*taken_edge_p)
2040 return SSA_PROP_INTERESTING;
2041 else
2042 return SSA_PROP_VARYING;
4ee9c684 2043}
2044
4ee9c684 2045
41511585 2046/* Evaluate statement STMT. If the statement produces an output value and
2047 its evaluation changes the lattice value of its output, return
2048 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2049 output value.
48e1416a 2050
41511585 2051 If STMT is a conditional branch and we can determine its truth
2052 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2053 value, return SSA_PROP_VARYING. */
4ee9c684 2054
41511585 2055static enum ssa_prop_result
75a70cf9 2056ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2057{
41511585 2058 tree def;
2059 ssa_op_iter iter;
4ee9c684 2060
41511585 2061 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2062 {
88dbf20f 2063 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2064 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2065 }
4ee9c684 2066
75a70cf9 2067 switch (gimple_code (stmt))
4ee9c684 2068 {
75a70cf9 2069 case GIMPLE_ASSIGN:
2070 /* If the statement is an assignment that produces a single
2071 output value, evaluate its RHS to see if the lattice value of
2072 its output has changed. */
2073 return visit_assignment (stmt, output_p);
2074
2075 case GIMPLE_CALL:
2076 /* A value-returning call also performs an assignment. */
2077 if (gimple_call_lhs (stmt) != NULL_TREE)
2078 return visit_assignment (stmt, output_p);
2079 break;
2080
2081 case GIMPLE_COND:
2082 case GIMPLE_SWITCH:
2083 /* If STMT is a conditional branch, see if we can determine
2084 which branch will be taken. */
2085 /* FIXME. It appears that we should be able to optimize
2086 computed GOTOs here as well. */
2087 return visit_cond_stmt (stmt, taken_edge_p);
2088
2089 default:
2090 break;
4ee9c684 2091 }
4ee9c684 2092
41511585 2093 /* Any other kind of statement is not interesting for constant
2094 propagation and, therefore, not worth simulating. */
41511585 2095 if (dump_file && (dump_flags & TDF_DETAILS))
2096 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2097
41511585 2098 /* Definitions made by statements other than assignments to
2099 SSA_NAMEs represent unknown modifications to their outputs.
2100 Mark them VARYING. */
88dbf20f 2101 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2102 {
b7e55469 2103 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
88dbf20f 2104 set_lattice_value (def, v);
2105 }
4ee9c684 2106
41511585 2107 return SSA_PROP_VARYING;
2108}
4ee9c684 2109
4ee9c684 2110
88dbf20f 2111/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2112
33a34f1e 2113static unsigned int
61207d43 2114do_ssa_ccp (void)
41511585 2115{
582a80ed 2116 unsigned int todo = 0;
2117 calculate_dominance_info (CDI_DOMINATORS);
41511585 2118 ccp_initialize ();
2119 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2120 if (ccp_finalize ())
582a80ed 2121 todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
2122 free_dominance_info (CDI_DOMINATORS);
2123 return todo;
4ee9c684 2124}
2125
5664499b 2126
2127static bool
41511585 2128gate_ccp (void)
5664499b 2129{
41511585 2130 return flag_tree_ccp != 0;
5664499b 2131}
2132
4ee9c684 2133
48e1416a 2134struct gimple_opt_pass pass_ccp =
41511585 2135{
20099e35 2136 {
2137 GIMPLE_PASS,
41511585 2138 "ccp", /* name */
2139 gate_ccp, /* gate */
88dbf20f 2140 do_ssa_ccp, /* execute */
41511585 2141 NULL, /* sub */
2142 NULL, /* next */
2143 0, /* static_pass_number */
2144 TV_TREE_CCP, /* tv_id */
49290934 2145 PROP_cfg | PROP_ssa, /* properties_required */
41511585 2146 0, /* properties_provided */
b6246c40 2147 0, /* properties_destroyed */
41511585 2148 0, /* todo_flags_start */
771e2890 2149 TODO_verify_ssa
20099e35 2150 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
2151 }
41511585 2152};
4ee9c684 2153
4ee9c684 2154
75a70cf9 2155
bdd0e199 2156/* Try to optimize out __builtin_stack_restore. Optimize it out
2157 if there is another __builtin_stack_restore in the same basic
2158 block and no calls or ASM_EXPRs are in between, or if this block's
2159 only outgoing edge is to EXIT_BLOCK and there are no calls or
2160 ASM_EXPRs after this __builtin_stack_restore. */
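
/* For example, a sketch (no calls or ASM_EXPRs occur between the two
   restores):

     t_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (t_1);   <-- redundant, optimized out
     __builtin_stack_restore (t_2);  */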
2161
2162static tree
75a70cf9 2163optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2164{
6ea999da 2165 tree callee;
2166 gimple stmt;
75a70cf9 2167
2168 basic_block bb = gsi_bb (i);
2169 gimple call = gsi_stmt (i);
bdd0e199 2170
75a70cf9 2171 if (gimple_code (call) != GIMPLE_CALL
2172 || gimple_call_num_args (call) != 1
2173 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2174 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2175 return NULL_TREE;
2176
75a70cf9 2177 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2178 {
75a70cf9 2179 stmt = gsi_stmt (i);
2180 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2181 return NULL_TREE;
75a70cf9 2182 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2183 continue;
2184
75a70cf9 2185 callee = gimple_call_fndecl (stmt);
c40a6f90 2186 if (!callee
2187 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2188 /* All regular builtins are ok, just obviously not alloca. */
581bf1c2 2189 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2190 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
bdd0e199 2191 return NULL_TREE;
2192
2193 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2194 goto second_stack_restore;
bdd0e199 2195 }
2196
6ea999da 2197 if (!gsi_end_p (i))
bdd0e199 2198 return NULL_TREE;
2199
6ea999da 2200  /* Allow a single successor to the exit block, or no successors at all.  */
2201 switch (EDGE_COUNT (bb->succs))
2202 {
2203 case 0:
2204 break;
2205 case 1:
2206 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2207 return NULL_TREE;
2208 break;
2209 default:
2210 return NULL_TREE;
2211 }
2212 second_stack_restore:
bdd0e199 2213
6ea999da 2214 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2215 If there are multiple uses, then the last one should remove the call.
2216 In any case, whether the call to __builtin_stack_save can be removed
2217 or not is irrelevant to removing the call to __builtin_stack_restore. */
2218 if (has_single_use (gimple_call_arg (call, 0)))
2219 {
2220 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2221 if (is_gimple_call (stack_save))
2222 {
2223 callee = gimple_call_fndecl (stack_save);
2224 if (callee
2225 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2226 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2227 {
2228 gimple_stmt_iterator stack_save_gsi;
2229 tree rhs;
bdd0e199 2230
6ea999da 2231 stack_save_gsi = gsi_for_stmt (stack_save);
2232 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2233 update_call_from_tree (&stack_save_gsi, rhs);
2234 }
2235 }
2236 }
bdd0e199 2237
75a70cf9 2238 /* No effect, so the statement will be deleted. */
bdd0e199 2239 return integer_zero_node;
2240}
75a70cf9 2241
8a58ed0a 2242/* If the va_list type is a simple pointer and nothing special is needed,
 2243   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 2244   turn __builtin_va_end (&ap) into a no-op, and turn __builtin_va_copy
 2245   into a simple pointer assignment.  */
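
/* For example, on a target whose va_list is a plain char * (a sketch):

     __builtin_va_start (&ap, 0)  ->  ap = __builtin_next_arg (0)
     __builtin_va_copy (&d, s)    ->  d = s
     __builtin_va_end (&ap)       ->  (statement removed)  */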
2246
2247static tree
75a70cf9 2248optimize_stdarg_builtin (gimple call)
8a58ed0a 2249{
5f57a8b1 2250 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2251 bool va_list_simple_ptr;
389dd41b 2252 location_t loc = gimple_location (call);
8a58ed0a 2253
75a70cf9 2254 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2255 return NULL_TREE;
2256
75a70cf9 2257 callee = gimple_call_fndecl (call);
5f57a8b1 2258
2259 cfun_va_list = targetm.fn_abi_va_list (callee);
2260 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2261 && (TREE_TYPE (cfun_va_list) == void_type_node
2262 || TREE_TYPE (cfun_va_list) == char_type_node);
2263
8a58ed0a 2264 switch (DECL_FUNCTION_CODE (callee))
2265 {
2266 case BUILT_IN_VA_START:
2267 if (!va_list_simple_ptr
2268 || targetm.expand_builtin_va_start != NULL
e7ed5dd7 2269 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
8a58ed0a 2270 return NULL_TREE;
2271
75a70cf9 2272 if (gimple_call_num_args (call) != 2)
8a58ed0a 2273 return NULL_TREE;
2274
75a70cf9 2275 lhs = gimple_call_arg (call, 0);
8a58ed0a 2276 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2277 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2278 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2279 return NULL_TREE;
48e1416a 2280
389dd41b 2281 lhs = build_fold_indirect_ref_loc (loc, lhs);
b9a16870 2282 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
75a70cf9 2283 1, integer_zero_node);
389dd41b 2284 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2285 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2286
2287 case BUILT_IN_VA_COPY:
2288 if (!va_list_simple_ptr)
2289 return NULL_TREE;
2290
75a70cf9 2291 if (gimple_call_num_args (call) != 2)
8a58ed0a 2292 return NULL_TREE;
2293
75a70cf9 2294 lhs = gimple_call_arg (call, 0);
8a58ed0a 2295 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2296 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2297 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2298 return NULL_TREE;
2299
389dd41b 2300 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2301 rhs = gimple_call_arg (call, 1);
8a58ed0a 2302 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2303 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2304 return NULL_TREE;
2305
389dd41b 2306 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2307 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2308
2309 case BUILT_IN_VA_END:
75a70cf9 2310 /* No effect, so the statement will be deleted. */
8a58ed0a 2311 return integer_zero_node;
2312
2313 default:
2314 gcc_unreachable ();
2315 }
2316}
75a70cf9 2317
f87df69a 2318/* Attempt to make the block of the __builtin_unreachable call at I unreachable by changing
2319 the incoming jumps. Return true if at least one jump was changed. */
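
/* For example (a sketch): given

     if (x_1 != 0)
       goto <bb 4>;   <-- <bb 4> starts with __builtin_unreachable ()
     else
       goto <bb 5>;

   the condition is rewritten with gimple_cond_make_false so the edge into
   the unreachable block is never taken.  */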
2320
2321static bool
2322optimize_unreachable (gimple_stmt_iterator i)
2323{
2324 basic_block bb = gsi_bb (i);
2325 gimple_stmt_iterator gsi;
2326 gimple stmt;
2327 edge_iterator ei;
2328 edge e;
2329 bool ret;
2330
2331 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2332 {
2333 stmt = gsi_stmt (gsi);
2334
2335 if (is_gimple_debug (stmt))
2336 continue;
2337
2338 if (gimple_code (stmt) == GIMPLE_LABEL)
2339 {
2340 /* Verify we do not need to preserve the label. */
2341 if (FORCED_LABEL (gimple_label_label (stmt)))
2342 return false;
2343
2344 continue;
2345 }
2346
2347 /* Only handle the case that __builtin_unreachable is the first statement
2348 in the block. We rely on DCE to remove stmts without side-effects
2349 before __builtin_unreachable. */
2350 if (gsi_stmt (gsi) != gsi_stmt (i))
2351 return false;
2352 }
2353
2354 ret = false;
2355 FOR_EACH_EDGE (e, ei, bb->preds)
2356 {
2357 gsi = gsi_last_bb (e->src);
522f73a1 2358 if (gsi_end_p (gsi))
2359 continue;
f87df69a 2360
522f73a1 2361 stmt = gsi_stmt (gsi);
2362 if (gimple_code (stmt) == GIMPLE_COND)
f87df69a 2363 {
2364 if (e->flags & EDGE_TRUE_VALUE)
2365 gimple_cond_make_false (stmt);
2366 else if (e->flags & EDGE_FALSE_VALUE)
2367 gimple_cond_make_true (stmt);
2368 else
2369 gcc_unreachable ();
2370 }
2371 else
2372 {
 2373	  /* TODO: handle other cases, e.g. a switch statement.  */
2374 continue;
2375 }
2376
2377 ret = true;
2378 }
2379
2380 return ret;
2381}
2382
4ee9c684 2383/* A simple pass that attempts to fold all builtin functions. This pass
2384 is run after we've propagated as many constants as we can. */
2385
2a1990e9 2386static unsigned int
4ee9c684 2387execute_fold_all_builtins (void)
2388{
b36237eb 2389 bool cfg_changed = false;
4ee9c684 2390 basic_block bb;
b1b7c0c4 2391 unsigned int todoflags = 0;
48e1416a 2392
4ee9c684 2393 FOR_EACH_BB (bb)
2394 {
75a70cf9 2395 gimple_stmt_iterator i;
2396 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2397 {
75a70cf9 2398 gimple stmt, old_stmt;
4ee9c684 2399 tree callee, result;
0a39fd54 2400 enum built_in_function fcode;
4ee9c684 2401
75a70cf9 2402 stmt = gsi_stmt (i);
2403
2404 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2405 {
75a70cf9 2406 gsi_next (&i);
0a39fd54 2407 continue;
2408 }
75a70cf9 2409 callee = gimple_call_fndecl (stmt);
4ee9c684 2410 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2411 {
75a70cf9 2412 gsi_next (&i);
0a39fd54 2413 continue;
2414 }
2415 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 2416
2d18b16d 2417 result = gimple_fold_builtin (stmt);
5a4b7e1e 2418
2419 if (result)
75a70cf9 2420 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 2421
4ee9c684 2422 if (!result)
2423 switch (DECL_FUNCTION_CODE (callee))
2424 {
2425 case BUILT_IN_CONSTANT_P:
2426 /* Resolve __builtin_constant_p. If it hasn't been
2427 folded to integer_one_node by now, it's fairly
2428 certain that the value simply isn't constant. */
75a70cf9 2429 result = integer_zero_node;
4ee9c684 2430 break;
2431
fca0886c 2432 case BUILT_IN_ASSUME_ALIGNED:
2433 /* Remove __builtin_assume_aligned. */
2434 result = gimple_call_arg (stmt, 0);
2435 break;
2436
bdd0e199 2437 case BUILT_IN_STACK_RESTORE:
75a70cf9 2438 result = optimize_stack_restore (i);
8a58ed0a 2439 if (result)
2440 break;
75a70cf9 2441 gsi_next (&i);
8a58ed0a 2442 continue;
2443
f87df69a 2444 case BUILT_IN_UNREACHABLE:
2445 if (optimize_unreachable (i))
2446 cfg_changed = true;
2447 break;
2448
8a58ed0a 2449 case BUILT_IN_VA_START:
2450 case BUILT_IN_VA_END:
2451 case BUILT_IN_VA_COPY:
2452 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 2453 result = optimize_stdarg_builtin (stmt);
bdd0e199 2454 if (result)
2455 break;
2456 /* FALLTHRU */
2457
4ee9c684 2458 default:
75a70cf9 2459 gsi_next (&i);
4ee9c684 2460 continue;
2461 }
2462
f87df69a 2463 if (result == NULL_TREE)
2464 break;
2465
4ee9c684 2466 if (dump_file && (dump_flags & TDF_DETAILS))
2467 {
2468 fprintf (dump_file, "Simplified\n ");
75a70cf9 2469 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2470 }
2471
75a70cf9 2472 old_stmt = stmt;
75a70cf9 2473 if (!update_call_from_tree (&i, result))
0fefde02 2474 {
2475 gimplify_and_update_call_from_tree (&i, result);
2476 todoflags |= TODO_update_address_taken;
2477 }
de6ed584 2478
75a70cf9 2479 stmt = gsi_stmt (i);
4c5fd53c 2480 update_stmt (stmt);
de6ed584 2481
75a70cf9 2482 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2483 && gimple_purge_dead_eh_edges (bb))
b36237eb 2484 cfg_changed = true;
4ee9c684 2485
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2487 {
2488 fprintf (dump_file, "to\n ");
75a70cf9 2489 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2490 fprintf (dump_file, "\n");
2491 }
0a39fd54 2492
2493 /* Retry the same statement if it changed into another
 2494	     builtin; there might be new opportunities now.  */
75a70cf9 2495 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2496 {
75a70cf9 2497 gsi_next (&i);
0a39fd54 2498 continue;
2499 }
75a70cf9 2500 callee = gimple_call_fndecl (stmt);
0a39fd54 2501 if (!callee
75a70cf9 2502 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2503 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2504 gsi_next (&i);
4ee9c684 2505 }
2506 }
48e1416a 2507
b36237eb 2508 /* Delete unreachable blocks. */
b1b7c0c4 2509 if (cfg_changed)
2510 todoflags |= TODO_cleanup_cfg;
48e1416a 2511
b1b7c0c4 2512 return todoflags;
4ee9c684 2513}
2514
41511585 2515
48e1416a 2516struct gimple_opt_pass pass_fold_builtins =
4ee9c684 2517{
20099e35 2518 {
2519 GIMPLE_PASS,
4ee9c684 2520 "fab", /* name */
2521 NULL, /* gate */
2522 execute_fold_all_builtins, /* execute */
2523 NULL, /* sub */
2524 NULL, /* next */
2525 0, /* static_pass_number */
0b1615c1 2526 TV_NONE, /* tv_id */
49290934 2527 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 2528 0, /* properties_provided */
2529 0, /* properties_destroyed */
2530 0, /* todo_flags_start */
771e2890 2531 TODO_verify_ssa
20099e35 2532 | TODO_update_ssa /* todo_flags_finish */
2533 }
4ee9c684 2534};