4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
87c0a9fc 2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4ee9c684 4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6
7This file is part of GCC.
48e1416a 8
4ee9c684 9GCC is free software; you can redistribute it and/or modify it
10under the terms of the GNU General Public License as published by the
8c4c00c1 11Free Software Foundation; either version 3, or (at your option) any
4ee9c684 12later version.
48e1416a 13
4ee9c684 14GCC is distributed in the hope that it will be useful, but WITHOUT
15ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
48e1416a 18
4ee9c684 19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
4ee9c684 22
88dbf20f 23/* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
30
bfa30570 31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
88dbf20f 38
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
43
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
46
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
50
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
 54 evaluates to a constant and conditional jumps whose predicate
 55 evaluates to a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
 58 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
61
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
48e1416a 66
88dbf20f 67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
73
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
76
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
82
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
 85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
87
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
95
96
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
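
   For instance, given this illustrative sketch (not literal output of
   the pass):

       x_1 = 4;
       y_2 = x_1 + 6;
       if (y_2 > 7) ...

   propagation records x_1 as CONSTANT 4 and y_2 as CONSTANT 10, and the
   final substitution folds the predicate to true, letting the later CFG
   cleanup remove the dead edge.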
101
4ee9c684 102 References:
103
104 Constant propagation with conditional branches,
105 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
106
107 Building an Optimizing Compiler,
108 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
109
110 Advanced Compiler Design and Implementation,
111 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
112
113#include "config.h"
114#include "system.h"
115#include "coretypes.h"
116#include "tm.h"
4ee9c684 117#include "tree.h"
41511585 118#include "flags.h"
4ee9c684 119#include "tm_p.h"
4ee9c684 120#include "basic-block.h"
41511585 121#include "output.h"
41511585 122#include "function.h"
ce084dfc 123#include "tree-pretty-print.h"
124#include "gimple-pretty-print.h"
41511585 125#include "timevar.h"
4ee9c684 126#include "tree-dump.h"
41511585 127#include "tree-flow.h"
4ee9c684 128#include "tree-pass.h"
41511585 129#include "tree-ssa-propagate.h"
5a4b7e1e 130#include "value-prof.h"
41511585 131#include "langhooks.h"
8782adcf 132#include "target.h"
0b205f4c 133#include "diagnostic-core.h"
add6ee5e 134#include "toplev.h"
43fb76c1 135#include "dbgcnt.h"
4ee9c684 136
137
138/* Possible lattice values. */
139typedef enum
140{
bfa30570 141 UNINITIALIZED,
4ee9c684 142 UNDEFINED,
143 CONSTANT,
144 VARYING
88dbf20f 145} ccp_lattice_t;
4ee9c684 146
14f101cf 147struct prop_value_d {
148 /* Lattice value. */
149 ccp_lattice_t lattice_val;
150
151 /* Propagated value. */
152 tree value;
153};
154
155typedef struct prop_value_d prop_value_t;
156
88dbf20f 157/* Array of propagated constant values. After propagation,
 158 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). */
20140406 163static prop_value_t *const_val;
4ee9c684 164
4af351a8 165static void canonicalize_float_value (prop_value_t *);
6688f8ec 166static bool ccp_fold_stmt (gimple_stmt_iterator *);
4af351a8 167
88dbf20f 168/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 169
170static void
88dbf20f 171dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
01406fc0 172{
41511585 173 switch (val.lattice_val)
01406fc0 174 {
88dbf20f 175 case UNINITIALIZED:
176 fprintf (outf, "%sUNINITIALIZED", prefix);
177 break;
41511585 178 case UNDEFINED:
179 fprintf (outf, "%sUNDEFINED", prefix);
180 break;
181 case VARYING:
182 fprintf (outf, "%sVARYING", prefix);
183 break;
41511585 184 case CONSTANT:
185 fprintf (outf, "%sCONSTANT ", prefix);
88dbf20f 186 print_generic_expr (outf, val.value, dump_flags);
41511585 187 break;
188 default:
8c0963c4 189 gcc_unreachable ();
41511585 190 }
01406fc0 191}
4ee9c684 192
4ee9c684 193
88dbf20f 194/* Print lattice value VAL to stderr. */
195
196void debug_lattice_value (prop_value_t val);
197
4b987fac 198DEBUG_FUNCTION void
88dbf20f 199debug_lattice_value (prop_value_t val)
200{
201 dump_lattice_value (stderr, "", val);
202 fprintf (stderr, "\n");
203}
4ee9c684 204
4ee9c684 205
88dbf20f 206/* Compute a default value for variable VAR and store it in the
207 CONST_VAL array. The following rules are used to get default
208 values:
01406fc0 209
88dbf20f 210 1- Global and static variables that are declared constant are
211 considered CONSTANT.
212
213 2- Any other value is considered UNDEFINED. This is useful when
41511585 214 considering PHI nodes. PHI arguments that are undefined do not
215 change the constant value of the PHI node, which allows for more
88dbf20f 216 constants to be propagated.
4ee9c684 217
8883e700 218 3- Variables defined by statements other than assignments and PHI
88dbf20f 219 nodes are considered VARYING.
4ee9c684 220
8883e700 221 4- Initial values of variables that are not GIMPLE registers are
bfa30570 222 considered VARYING. */
4ee9c684 223
88dbf20f 224static prop_value_t
225get_default_value (tree var)
226{
227 tree sym = SSA_NAME_VAR (var);
61207d43 228 prop_value_t val = { UNINITIALIZED, NULL_TREE };
8edeb88b 229 gimple stmt;
230
231 stmt = SSA_NAME_DEF_STMT (var);
232
233 if (gimple_nop_p (stmt))
4ee9c684 234 {
8edeb88b 235 /* Variables defined by an empty statement are those used
236 before being initialized. If VAR is a local variable, we
237 can assume initially that it is UNDEFINED, otherwise we must
238 consider it VARYING. */
524a0531 239 if (is_gimple_reg (sym)
240 && TREE_CODE (sym) == VAR_DECL)
8edeb88b 241 val.lattice_val = UNDEFINED;
242 else
243 val.lattice_val = VARYING;
4ee9c684 244 }
8edeb88b 245 else if (is_gimple_assign (stmt)
246 /* Value-returning GIMPLE_CALL statements assign to
247 a variable, and are treated similarly to GIMPLE_ASSIGN. */
248 || (is_gimple_call (stmt)
249 && gimple_call_lhs (stmt) != NULL_TREE)
250 || gimple_code (stmt) == GIMPLE_PHI)
41511585 251 {
8edeb88b 252 tree cst;
253 if (gimple_assign_single_p (stmt)
254 && DECL_P (gimple_assign_rhs1 (stmt))
255 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
88dbf20f 256 {
8edeb88b 257 val.lattice_val = CONSTANT;
258 val.value = cst;
88dbf20f 259 }
260 else
8edeb88b 261 /* Any other variable defined by an assignment or a PHI node
262 is considered UNDEFINED. */
263 val.lattice_val = UNDEFINED;
264 }
265 else
266 {
267 /* Otherwise, VAR will never take on a constant value. */
268 val.lattice_val = VARYING;
41511585 269 }
4ee9c684 270
41511585 271 return val;
272}
4ee9c684 273
4ee9c684 274
bfa30570 275/* Get the lattice value associated with variable VAR. */
4ee9c684 276
bfa30570 277static inline prop_value_t *
278get_value (tree var)
88dbf20f 279{
e004838d 280 prop_value_t *val;
bfa30570 281
e004838d 282 if (const_val == NULL)
283 return NULL;
284
285 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 286 if (val->lattice_val == UNINITIALIZED)
4ee9c684 287 *val = get_default_value (var);
288
4af351a8 289 canonicalize_float_value (val);
290
4ee9c684 291 return val;
292}
293
15d138c9 294/* Return the constant tree value associated with VAR. */
295
296static inline tree
297get_constant_value (tree var)
298{
299 prop_value_t *val = get_value (var);
300 if (val && val->lattice_val == CONSTANT)
301 return val->value;
302 return NULL_TREE;
303}
304
bfa30570 305/* Sets the value associated with VAR to VARYING. */
306
307static inline void
308set_value_varying (tree var)
309{
310 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
311
312 val->lattice_val = VARYING;
313 val->value = NULL_TREE;
bfa30570 314}
4ee9c684 315
b31eb493 316/* For float types, modify the value of VAL to make ccp work correctly
317 for non-standard values (-0, NaN):
318
319 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
320 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
321 This is to fix the following problem (see PR 29921): Suppose we have
322
323 x = 0.0 * y
324
325 and we set value of y to NaN. This causes value of x to be set to NaN.
326 When we later determine that y is in fact VARYING, fold uses the fact
327 that HONOR_NANS is false, and we try to change the value of x to 0,
328 causing an ICE. With HONOR_NANS being false, the real appearance of
329 NaN would cause undefined behavior, though, so claiming that y (and x)
330 are UNDEFINED initially is correct. */
331
332static void
333canonicalize_float_value (prop_value_t *val)
334{
335 enum machine_mode mode;
336 tree type;
337 REAL_VALUE_TYPE d;
338
339 if (val->lattice_val != CONSTANT
340 || TREE_CODE (val->value) != REAL_CST)
341 return;
342
343 d = TREE_REAL_CST (val->value);
344 type = TREE_TYPE (val->value);
345 mode = TYPE_MODE (type);
346
347 if (!HONOR_SIGNED_ZEROS (mode)
348 && REAL_VALUE_MINUS_ZERO (d))
349 {
350 val->value = build_real (type, dconst0);
351 return;
352 }
353
354 if (!HONOR_NANS (mode)
355 && REAL_VALUE_ISNAN (d))
356 {
357 val->lattice_val = UNDEFINED;
358 val->value = NULL;
b31eb493 359 return;
360 }
361}
362
88dbf20f 363/* Set the value for variable VAR to NEW_VAL. Return true if the new
364 value is different from VAR's previous value. */
4ee9c684 365
41511585 366static bool
88dbf20f 367set_lattice_value (tree var, prop_value_t new_val)
4ee9c684 368{
6d0bf6d6 369 /* We can deal with old UNINITIALIZED values just fine here. */
370 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
88dbf20f 371
b31eb493 372 canonicalize_float_value (&new_val);
373
88dbf20f 374 /* Lattice transitions must always be monotonically increasing in
bfa30570 375 value. If *OLD_VAL and NEW_VAL are the same, return false to
376 inform the caller that this was a non-transition. */
377
aecfc21d 378 gcc_assert (old_val->lattice_val < new_val.lattice_val
88dbf20f 379 || (old_val->lattice_val == new_val.lattice_val
aecfc21d 380 && ((!old_val->value && !new_val.value)
61207d43 381 || operand_equal_p (old_val->value, new_val.value, 0))));
88dbf20f 382
383 if (old_val->lattice_val != new_val.lattice_val)
4ee9c684 384 {
41511585 385 if (dump_file && (dump_flags & TDF_DETAILS))
386 {
88dbf20f 387 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
bfa30570 388 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
41511585 389 }
390
88dbf20f 391 *old_val = new_val;
392
6d0bf6d6 393 gcc_assert (new_val.lattice_val != UNINITIALIZED);
bfa30570 394 return true;
4ee9c684 395 }
41511585 396
397 return false;
4ee9c684 398}
399
6d0bf6d6 400/* Return the value for the tree operand EXPR. */
401
402static prop_value_t
403get_value_for_expr (tree expr)
404{
405 prop_value_t val;
406
407 if (TREE_CODE (expr) == SSA_NAME)
408 val = *(get_value (expr));
409 else if (is_gimple_min_invariant (expr))
410 {
411 val.lattice_val = CONSTANT;
412 val.value = expr;
413 canonicalize_float_value (&val);
414 }
415 else
416 {
417 val.lattice_val = VARYING;
418 val.value = NULL_TREE;
419 }
420
421 return val;
422}
423
4ee9c684 424
88dbf20f 425/* Return the likely CCP lattice value for STMT.
4ee9c684 426
41511585 427 If STMT has no operands, then return CONSTANT.
4ee9c684 428
d61b9af3 429 Else if undefinedness of operands of STMT causes its value to be
430 undefined, then return UNDEFINED.
4ee9c684 431
41511585 432 Else if any operands of STMT are constants, then return CONSTANT.
4ee9c684 433
41511585 434 Else return VARYING. */
4ee9c684 435
88dbf20f 436static ccp_lattice_t
75a70cf9 437likely_value (gimple stmt)
41511585 438{
d61b9af3 439 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
41511585 440 tree use;
441 ssa_op_iter iter;
8edeb88b 442 unsigned i;
4ee9c684 443
590c3166 444 enum gimple_code code = gimple_code (stmt);
75a70cf9 445
446 /* This function appears to be called only for assignments, calls,
447 conditionals, and switches, due to the logic in visit_stmt. */
448 gcc_assert (code == GIMPLE_ASSIGN
449 || code == GIMPLE_CALL
450 || code == GIMPLE_COND
451 || code == GIMPLE_SWITCH);
88dbf20f 452
453 /* If the statement has volatile operands, it won't fold to a
454 constant value. */
75a70cf9 455 if (gimple_has_volatile_ops (stmt))
88dbf20f 456 return VARYING;
457
75a70cf9 458 /* Arrive here for more complex cases. */
bfa30570 459 has_constant_operand = false;
d61b9af3 460 has_undefined_operand = false;
461 all_undefined_operands = true;
8edeb88b 462 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
41511585 463 {
bfa30570 464 prop_value_t *val = get_value (use);
41511585 465
bfa30570 466 if (val->lattice_val == UNDEFINED)
d61b9af3 467 has_undefined_operand = true;
468 else
469 all_undefined_operands = false;
88dbf20f 470
41511585 471 if (val->lattice_val == CONSTANT)
bfa30570 472 has_constant_operand = true;
4ee9c684 473 }
41511585 474
dd277d48 475 /* There may be constants in regular rhs operands. For calls we
 476 have to ignore the lhs, the fndecl and the static chain; for
 477 other statements, only the lhs. */
478 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
8edeb88b 479 i < gimple_num_ops (stmt); ++i)
480 {
481 tree op = gimple_op (stmt, i);
482 if (!op || TREE_CODE (op) == SSA_NAME)
483 continue;
484 if (is_gimple_min_invariant (op))
485 has_constant_operand = true;
486 }
487
87c0a9fc 488 if (has_constant_operand)
489 all_undefined_operands = false;
490
d61b9af3 491 /* If the operation combines operands like COMPLEX_EXPR make sure to
492 not mark the result UNDEFINED if only one part of the result is
493 undefined. */
75a70cf9 494 if (has_undefined_operand && all_undefined_operands)
d61b9af3 495 return UNDEFINED;
75a70cf9 496 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
d61b9af3 497 {
75a70cf9 498 switch (gimple_assign_rhs_code (stmt))
d61b9af3 499 {
500 /* Unary operators are handled with all_undefined_operands. */
501 case PLUS_EXPR:
502 case MINUS_EXPR:
d61b9af3 503 case POINTER_PLUS_EXPR:
d61b9af3 504 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
505 Not bitwise operators, one VARYING operand may specify the
506 result completely. Not logical operators for the same reason.
05a936a0 507 Not COMPLEX_EXPR as one VARYING operand makes the result partly
508 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
509 the undefined operand may be promoted. */
d61b9af3 510 return UNDEFINED;
511
512 default:
513 ;
514 }
515 }
 516 /* If there was an UNDEFINED operand but the result might not be UNDEFINED,
 517 fall back to VARYING even if there were CONSTANT operands. */
518 if (has_undefined_operand)
519 return VARYING;
520
8edeb88b 521 /* We do not consider virtual operands here -- a load from read-only
522 memory may have only VARYING virtual operands, but still be
523 constant. */
bfa30570 524 if (has_constant_operand
8edeb88b 525 || gimple_references_memory_p (stmt))
88dbf20f 526 return CONSTANT;
527
bfa30570 528 return VARYING;
4ee9c684 529}
530
bfa30570 531/* Returns true if STMT cannot be constant. */
532
533static bool
75a70cf9 534surely_varying_stmt_p (gimple stmt)
bfa30570 535{
536 /* If the statement has operands that we cannot handle, it cannot be
537 constant. */
75a70cf9 538 if (gimple_has_volatile_ops (stmt))
bfa30570 539 return true;
540
f257af64 541 /* If it is a call that does not return a value, or a direct
 542 call to something other than a builtin, it is varying. */
75a70cf9 543 if (is_gimple_call (stmt))
f257af64 544 {
545 tree fndecl;
546 if (!gimple_call_lhs (stmt)
547 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5768aeb3 548 && !DECL_BUILT_IN (fndecl)))
f257af64 549 return true;
550 }
bfa30570 551
8edeb88b 552 /* Any other store operation is not interesting. */
dd277d48 553 else if (gimple_vdef (stmt))
8edeb88b 554 return true;
555
bfa30570 556 /* Anything other than assignments and conditional jumps are not
557 interesting for CCP. */
75a70cf9 558 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 559 && gimple_code (stmt) != GIMPLE_COND
560 && gimple_code (stmt) != GIMPLE_SWITCH
561 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 562 return true;
563
564 return false;
565}
4ee9c684 566
41511585 567/* Initialize local data structures for CCP. */
4ee9c684 568
569static void
41511585 570ccp_initialize (void)
4ee9c684 571{
41511585 572 basic_block bb;
4ee9c684 573
43959b95 574 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
4ee9c684 575
41511585 576 /* Initialize simulation flags for PHI nodes and statements. */
577 FOR_EACH_BB (bb)
4ee9c684 578 {
75a70cf9 579 gimple_stmt_iterator i;
4ee9c684 580
75a70cf9 581 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
41511585 582 {
75a70cf9 583 gimple stmt = gsi_stmt (i);
2193544e 584 bool is_varying;
585
 586 /* If the statement is a control insn, we must simulate it at
 587 least once; failing to do so means its outgoing edges would
 588 never get added. */
589 if (stmt_ends_bb_p (stmt))
590 is_varying = false;
591 else
592 is_varying = surely_varying_stmt_p (stmt);
4ee9c684 593
bfa30570 594 if (is_varying)
41511585 595 {
88dbf20f 596 tree def;
597 ssa_op_iter iter;
598
599 /* If the statement will not produce a constant, mark
600 all its outputs VARYING. */
601 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
8edeb88b 602 set_value_varying (def);
41511585 603 }
75a70cf9 604 prop_set_simulate_again (stmt, !is_varying);
41511585 605 }
4ee9c684 606 }
607
75a70cf9 608 /* Now process PHI nodes. We never clear the simulate_again flag on
609 phi nodes, since we do not know which edges are executable yet,
 610 except for phi nodes for virtual operands. */
41511585 611 FOR_EACH_BB (bb)
4ee9c684 612 {
75a70cf9 613 gimple_stmt_iterator i;
41511585 614
75a70cf9 615 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
616 {
617 gimple phi = gsi_stmt (i);
618
61207d43 619 if (!is_gimple_reg (gimple_phi_result (phi)))
75a70cf9 620 prop_set_simulate_again (phi, false);
bfa30570 621 else
75a70cf9 622 prop_set_simulate_again (phi, true);
41511585 623 }
4ee9c684 624 }
41511585 625}
4ee9c684 626
43fb76c1 627/* Debug count support. Reset the values of ssa names to
 628 VARYING when the total number of ssa names analyzed exceeds
 629 the specified debug count. */
630
631static void
632do_dbg_cnt (void)
633{
634 unsigned i;
635 for (i = 0; i < num_ssa_names; i++)
636 {
637 if (!dbg_cnt (ccp))
638 {
639 const_val[i].lattice_val = VARYING;
640 const_val[i].value = NULL_TREE;
641 }
642 }
643}
644
4ee9c684 645
88dbf20f 646/* Do final substitution of propagated values, clean up the flowgraph and
48e1416a 647 free allocated storage.
4ee9c684 648
33a34f1e 649 Return TRUE when something was optimized. */
650
651static bool
88dbf20f 652ccp_finalize (void)
4ee9c684 653{
43fb76c1 654 bool something_changed;
655
656 do_dbg_cnt ();
88dbf20f 657 /* Perform substitutions based on the known constant values. */
14f101cf 658 something_changed = substitute_and_fold (get_constant_value,
659 ccp_fold_stmt, true);
4ee9c684 660
88dbf20f 661 free (const_val);
e004838d 662 const_val = NULL;
33a34f1e 663 return something_changed;
4ee9c684 664}
665
666
88dbf20f 667/* Compute the meet operator between *VAL1 and *VAL2. Store the result
668 in VAL1.
669
670 any M UNDEFINED = any
88dbf20f 671 any M VARYING = VARYING
672 Ci M Cj = Ci if (i == j)
673 Ci M Cj = VARYING if (i != j)
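
   For example (an illustrative reading of the rules above):

   CONSTANT 3 M UNDEFINED = CONSTANT 3
   CONSTANT 3 M CONSTANT 3 = CONSTANT 3
   CONSTANT 3 M CONSTANT 4 = VARYING
   CONSTANT 3 M VARYING = VARYING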
bfa30570 674 */
4ee9c684 675
676static void
88dbf20f 677ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
4ee9c684 678{
88dbf20f 679 if (val1->lattice_val == UNDEFINED)
4ee9c684 680 {
88dbf20f 681 /* UNDEFINED M any = any */
682 *val1 = *val2;
41511585 683 }
88dbf20f 684 else if (val2->lattice_val == UNDEFINED)
92481a4d 685 {
88dbf20f 686 /* any M UNDEFINED = any
687 Nothing to do. VAL1 already contains the value we want. */
688 ;
92481a4d 689 }
88dbf20f 690 else if (val1->lattice_val == VARYING
691 || val2->lattice_val == VARYING)
41511585 692 {
88dbf20f 693 /* any M VARYING = VARYING. */
694 val1->lattice_val = VARYING;
695 val1->value = NULL_TREE;
41511585 696 }
88dbf20f 697 else if (val1->lattice_val == CONSTANT
698 && val2->lattice_val == CONSTANT
61207d43 699 && simple_cst_equal (val1->value, val2->value) == 1)
41511585 700 {
88dbf20f 701 /* Ci M Cj = Ci if (i == j)
702 Ci M Cj = VARYING if (i != j)
703
704 If these two values come from memory stores, make sure that
7739e598 705 they come from the same memory reference.
706 Nothing to do. VAL1 already contains the value we want. */
41511585 707 }
708 else
709 {
88dbf20f 710 /* Any other combination is VARYING. */
711 val1->lattice_val = VARYING;
712 val1->value = NULL_TREE;
41511585 713 }
4ee9c684 714}
715
716
41511585 717/* Loop through PHI_NODE's parameters and compare their lattice
 718 values to determine PHI_NODE's lattice value. The value of a
 88dbf20f 719 PHI node is determined by calling ccp_lattice_meet with all the arguments
41511585 720 of the PHI node that are incoming via executable edges. */
4ee9c684 721
41511585 722static enum ssa_prop_result
75a70cf9 723ccp_visit_phi_node (gimple phi)
4ee9c684 724{
75a70cf9 725 unsigned i;
88dbf20f 726 prop_value_t *old_val, new_val;
4ee9c684 727
41511585 728 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 729 {
41511585 730 fprintf (dump_file, "\nVisiting PHI node: ");
75a70cf9 731 print_gimple_stmt (dump_file, phi, 0, dump_flags);
4ee9c684 732 }
4ee9c684 733
75a70cf9 734 old_val = get_value (gimple_phi_result (phi));
41511585 735 switch (old_val->lattice_val)
736 {
737 case VARYING:
88dbf20f 738 return SSA_PROP_VARYING;
4ee9c684 739
41511585 740 case CONSTANT:
741 new_val = *old_val;
742 break;
4ee9c684 743
41511585 744 case UNDEFINED:
41511585 745 new_val.lattice_val = UNDEFINED;
88dbf20f 746 new_val.value = NULL_TREE;
41511585 747 break;
4ee9c684 748
41511585 749 default:
8c0963c4 750 gcc_unreachable ();
41511585 751 }
4ee9c684 752
75a70cf9 753 for (i = 0; i < gimple_phi_num_args (phi); i++)
41511585 754 {
88dbf20f 755 /* Compute the meet operator over all the PHI arguments flowing
756 through executable edges. */
75a70cf9 757 edge e = gimple_phi_arg_edge (phi, i);
4ee9c684 758
41511585 759 if (dump_file && (dump_flags & TDF_DETAILS))
760 {
761 fprintf (dump_file,
762 "\n Argument #%d (%d -> %d %sexecutable)\n",
763 i, e->src->index, e->dest->index,
764 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
765 }
766
 767 /* If the incoming edge is executable, compute the meet operator for
768 the existing value of the PHI node and the current PHI argument. */
769 if (e->flags & EDGE_EXECUTABLE)
770 {
75a70cf9 771 tree arg = gimple_phi_arg (phi, i)->def;
6d0bf6d6 772 prop_value_t arg_val = get_value_for_expr (arg);
4ee9c684 773
88dbf20f 774 ccp_lattice_meet (&new_val, &arg_val);
4ee9c684 775
41511585 776 if (dump_file && (dump_flags & TDF_DETAILS))
777 {
778 fprintf (dump_file, "\t");
88dbf20f 779 print_generic_expr (dump_file, arg, dump_flags);
780 dump_lattice_value (dump_file, "\tValue: ", arg_val);
41511585 781 fprintf (dump_file, "\n");
782 }
4ee9c684 783
41511585 784 if (new_val.lattice_val == VARYING)
785 break;
786 }
787 }
4ee9c684 788
789 if (dump_file && (dump_flags & TDF_DETAILS))
41511585 790 {
791 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
792 fprintf (dump_file, "\n\n");
793 }
794
bfa30570 795 /* Make the transition to the new value. */
75a70cf9 796 if (set_lattice_value (gimple_phi_result (phi), new_val))
41511585 797 {
798 if (new_val.lattice_val == VARYING)
799 return SSA_PROP_VARYING;
800 else
801 return SSA_PROP_INTERESTING;
802 }
803 else
804 return SSA_PROP_NOT_INTERESTING;
4ee9c684 805}
806
15d138c9 807/* Return the constant value for OP, or OP itself if it has none. */
00f4f705 808
809static tree
15d138c9 810valueize_op (tree op)
00f4f705 811{
00f4f705 812 if (TREE_CODE (op) == SSA_NAME)
813 {
15d138c9 814 tree tem = get_constant_value (op);
815 if (tem)
816 return tem;
00f4f705 817 }
818 return op;
819}
820
41511585 821/* CCP specific front-end to the non-destructive constant folding
822 routines.
4ee9c684 823
824 Attempt to simplify the RHS of STMT knowing that one or more
825 operands are constants.
826
827 If simplification is possible, return the simplified RHS,
75a70cf9 828 otherwise return the original RHS or NULL_TREE. */
4ee9c684 829
830static tree
75a70cf9 831ccp_fold (gimple stmt)
4ee9c684 832{
389dd41b 833 location_t loc = gimple_location (stmt);
75a70cf9 834 switch (gimple_code (stmt))
88dbf20f 835 {
75a70cf9 836 case GIMPLE_ASSIGN:
837 {
838 enum tree_code subcode = gimple_assign_rhs_code (stmt);
839
840 switch (get_gimple_rhs_class (subcode))
841 {
842 case GIMPLE_SINGLE_RHS:
843 {
844 tree rhs = gimple_assign_rhs1 (stmt);
845 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
846
847 if (TREE_CODE (rhs) == SSA_NAME)
848 {
849 /* If the RHS is an SSA_NAME, return its known constant value,
850 if any. */
15d138c9 851 return get_constant_value (rhs);
75a70cf9 852 }
853 /* Handle propagating invariant addresses into address operations.
854 The folding we do here matches that in tree-ssa-forwprop.c. */
855 else if (TREE_CODE (rhs) == ADDR_EXPR)
856 {
857 tree *base;
858 base = &TREE_OPERAND (rhs, 0);
859 while (handled_component_p (*base))
860 base = &TREE_OPERAND (*base, 0);
182cf5a9 861 if (TREE_CODE (*base) == MEM_REF
75a70cf9 862 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
863 {
15d138c9 864 tree val = get_constant_value (TREE_OPERAND (*base, 0));
865 if (val
866 && TREE_CODE (val) == ADDR_EXPR)
75a70cf9 867 {
182cf5a9 868 tree ret, save = *base;
869 tree new_base;
870 new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
15d138c9 871 unshare_expr (val),
182cf5a9 872 TREE_OPERAND (*base, 1));
75a70cf9 873 /* We need to return a new tree, not modify the IL
874 or share parts of it. So play some tricks to
875 avoid manually building it. */
182cf5a9 876 *base = new_base;
75a70cf9 877 ret = unshare_expr (rhs);
878 recompute_tree_invariant_for_addr_expr (ret);
879 *base = save;
880 return ret;
881 }
882 }
883 }
388a0bc7 884 else if (TREE_CODE (rhs) == CONSTRUCTOR
885 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
886 && (CONSTRUCTOR_NELTS (rhs)
887 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
888 {
889 unsigned i;
890 tree val, list;
891
892 list = NULL_TREE;
893 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
894 {
15d138c9 895 val = valueize_op (val);
388a0bc7 896 if (TREE_CODE (val) == INTEGER_CST
897 || TREE_CODE (val) == REAL_CST
898 || TREE_CODE (val) == FIXED_CST)
899 list = tree_cons (NULL_TREE, val, list);
900 else
901 return NULL_TREE;
902 }
903
904 return build_vector (TREE_TYPE (rhs), nreverse (list));
905 }
4ee9c684 906
75a70cf9 907 if (kind == tcc_reference)
3e4be816 908 {
0fefde02 909 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
910 || TREE_CODE (rhs) == REALPART_EXPR
911 || TREE_CODE (rhs) == IMAGPART_EXPR)
3e4be816 912 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
913 {
15d138c9 914 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
915 if (val)
389dd41b 916 return fold_unary_loc (EXPR_LOCATION (rhs),
15d138c9 917 TREE_CODE (rhs),
918 TREE_TYPE (rhs), val);
3e4be816 919 }
182cf5a9 920 else if (TREE_CODE (rhs) == MEM_REF
8edeb88b 921 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
922 {
15d138c9 923 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
924 if (val
925 && TREE_CODE (val) == ADDR_EXPR)
182cf5a9 926 {
927 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
15d138c9 928 unshare_expr (val),
182cf5a9 929 TREE_OPERAND (rhs, 1));
930 if (tem)
931 rhs = tem;
932 }
8edeb88b 933 }
3e4be816 934 return fold_const_aggregate_ref (rhs);
935 }
75a70cf9 936 else if (kind == tcc_declaration)
937 return get_symbol_constant_value (rhs);
938 return rhs;
939 }
48e1416a 940
75a70cf9 941 case GIMPLE_UNARY_RHS:
942 {
943 /* Handle unary operators that can appear in GIMPLE form.
944 Note that we know the single operand must be a constant,
945 so this should almost always return a simplified RHS. */
946 tree lhs = gimple_assign_lhs (stmt);
15d138c9 947 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
75a70cf9 948
949 /* Conversions are useless for CCP purposes if they are
950 value-preserving. Thus the restrictions that
951 useless_type_conversion_p places for pointer type conversions
952 do not apply here. Substitution later will only substitute to
953 allowed places. */
d9659041 954 if (CONVERT_EXPR_CODE_P (subcode)
5768aeb3 955 && POINTER_TYPE_P (TREE_TYPE (lhs))
182cf5a9 956 && POINTER_TYPE_P (TREE_TYPE (op0)))
5768aeb3 957 {
958 tree tem;
182cf5a9 959 /* Try to re-construct array references on-the-fly. */
5768aeb3 960 if (!useless_type_conversion_p (TREE_TYPE (lhs),
961 TREE_TYPE (op0))
962 && ((tem = maybe_fold_offset_to_address
389dd41b 963 (loc,
e60a6f7b 964 op0, integer_zero_node, TREE_TYPE (lhs)))
5768aeb3 965 != NULL_TREE))
966 return tem;
967 return op0;
968 }
75a70cf9 969
48e1416a 970 return
389dd41b 971 fold_unary_ignore_overflow_loc (loc, subcode,
972 gimple_expr_type (stmt), op0);
f1fb2997 973 }
75a70cf9 974
975 case GIMPLE_BINARY_RHS:
976 {
977 /* Handle binary operators that can appear in GIMPLE form. */
15d138c9 978 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
979 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
75a70cf9 980
182cf5a9 981 /* Translate &x + CST into an invariant form suitable for
982 further propagation. */
5768aeb3 983 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
984 && TREE_CODE (op0) == ADDR_EXPR
985 && TREE_CODE (op1) == INTEGER_CST)
986 {
182cf5a9 987 tree off = fold_convert (ptr_type_node, op1);
988 return build_fold_addr_expr
989 (fold_build2 (MEM_REF,
990 TREE_TYPE (TREE_TYPE (op0)),
991 unshare_expr (op0), off));
5768aeb3 992 }
993
389dd41b 994 return fold_binary_loc (loc, subcode,
182cf5a9 995 gimple_expr_type (stmt), op0, op1);
75a70cf9 996 }
997
00f4f705 998 case GIMPLE_TERNARY_RHS:
999 {
15d138c9 1000 /* Handle ternary operators that can appear in GIMPLE form. */
1001 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1002 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
1003 tree op2 = valueize_op (gimple_assign_rhs3 (stmt));
00f4f705 1004
1005 return fold_ternary_loc (loc, subcode,
1006 gimple_expr_type (stmt), op0, op1, op2);
1007 }
1008
75a70cf9 1009 default:
1010 gcc_unreachable ();
1011 }
1012 }
1013 break;
4ee9c684 1014
75a70cf9 1015 case GIMPLE_CALL:
f257af64 1016 {
15d138c9 1017 tree fn = valueize_op (gimple_call_fn (stmt));
f257af64 1018 if (TREE_CODE (fn) == ADDR_EXPR
8ef4f124 1019 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
f257af64 1020 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1021 {
1022 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1023 tree call, retval;
1024 unsigned i;
1025 for (i = 0; i < gimple_call_num_args (stmt); ++i)
15d138c9 1026 args[i] = valueize_op (gimple_call_arg (stmt, i));
389dd41b 1027 call = build_call_array_loc (loc,
1028 gimple_call_return_type (stmt),
1029 fn, gimple_call_num_args (stmt), args);
1030 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
f257af64 1031 if (retval)
1032 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1033 STRIP_NOPS (retval);
1034 return retval;
1035 }
1036 return NULL_TREE;
1037 }
4ee9c684 1038
75a70cf9 1039 case GIMPLE_COND:
1040 {
1041 /* Handle comparison operators that can appear in GIMPLE form. */
15d138c9 1042 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1043 tree op1 = valueize_op (gimple_cond_rhs (stmt));
75a70cf9 1044 enum tree_code code = gimple_cond_code (stmt);
389dd41b 1045 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
75a70cf9 1046 }
4ee9c684 1047
75a70cf9 1048 case GIMPLE_SWITCH:
1049 {
15d138c9 1050 /* Return the constant switch index. */
1051 return valueize_op (gimple_switch_index (stmt));
75a70cf9 1052 }
912f109f 1053
75a70cf9 1054 default:
1055 gcc_unreachable ();
4ee9c684 1056 }
4ee9c684 1057}
1058
8782adcf 1059/* Return the tree representing the element referenced by T if T is an
 1060 ARRAY_REF, COMPONENT_REF or MEM_REF into constant aggregates (or a
 1061 REALPART_EXPR/IMAGPART_EXPR thereof). Return NULL_TREE otherwise. */
1062
e004838d 1063tree
8782adcf 1064fold_const_aggregate_ref (tree t)
1065{
c75b4594 1066 tree base, ctor, idx, field;
1067 unsigned HOST_WIDE_INT cnt;
1068 tree cfield, cval;
15d138c9 1069 tree tem;
8782adcf 1070
8edeb88b 1071 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1072 return get_symbol_constant_value (t);
1073
8782adcf 1074 switch (TREE_CODE (t))
1075 {
1076 case ARRAY_REF:
1077 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1078 DECL_INITIAL. If BASE is a nested reference into another
1079 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1080 the inner reference. */
1081 base = TREE_OPERAND (t, 0);
1082 switch (TREE_CODE (base))
1083 {
1c196c04 1084 case MEM_REF:
1085 /* ??? We could handle this case. */
1086 if (!integer_zerop (TREE_OPERAND (base, 1)))
1087 return NULL_TREE;
1088 base = get_base_address (base);
1089 if (!base
1090 || TREE_CODE (base) != VAR_DECL)
1091 return NULL_TREE;
1092
1093 /* Fallthru. */
8782adcf 1094 case VAR_DECL:
1095 if (!TREE_READONLY (base)
1096 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1097 || !targetm.binds_local_p (base))
1098 return NULL_TREE;
1099
1100 ctor = DECL_INITIAL (base);
1101 break;
1102
1103 case ARRAY_REF:
1104 case COMPONENT_REF:
1105 ctor = fold_const_aggregate_ref (base);
1106 break;
1107
04236c3a 1108 case STRING_CST:
1109 case CONSTRUCTOR:
1110 ctor = base;
1111 break;
1112
8782adcf 1113 default:
1114 return NULL_TREE;
1115 }
1116
1117 if (ctor == NULL_TREE
4f61cce6 1118 || (TREE_CODE (ctor) != CONSTRUCTOR
1119 && TREE_CODE (ctor) != STRING_CST)
8782adcf 1120 || !TREE_STATIC (ctor))
1121 return NULL_TREE;
1122
1123 /* Get the index. If we have an SSA_NAME, try to resolve it
1124 with the current lattice value for the SSA_NAME. */
1125 idx = TREE_OPERAND (t, 1);
1126 switch (TREE_CODE (idx))
1127 {
1128 case SSA_NAME:
15d138c9 1129 if ((tem = get_constant_value (idx))
1130 && TREE_CODE (tem) == INTEGER_CST)
1131 idx = tem;
8782adcf 1132 else
1133 return NULL_TREE;
1134 break;
1135
1136 case INTEGER_CST:
1137 break;
1138
1139 default:
1140 return NULL_TREE;
1141 }
1142
4f61cce6 1143 /* Fold read from constant string. */
1144 if (TREE_CODE (ctor) == STRING_CST)
1145 {
1146 if ((TYPE_MODE (TREE_TYPE (t))
1147 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1148 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1149 == MODE_INT)
1150 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1151 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
7b050b7b 1152 return build_int_cst_type (TREE_TYPE (t),
1153 (TREE_STRING_POINTER (ctor)
1154 [TREE_INT_CST_LOW (idx)]));
4f61cce6 1155 return NULL_TREE;
1156 }
1157
8782adcf 1158 /* Whoo-hoo! I'll fold ya baby. Yeah! */
c75b4594 1159 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1160 if (tree_int_cst_equal (cfield, idx))
590d65aa 1161 {
6872bf3c 1162 STRIP_NOPS (cval);
3a44a278 1163 if (TREE_CODE (cval) == ADDR_EXPR)
1164 {
1165 tree base = get_base_address (TREE_OPERAND (cval, 0));
1166 if (base && TREE_CODE (base) == VAR_DECL)
1167 add_referenced_var (base);
1168 }
590d65aa 1169 return cval;
1170 }
8782adcf 1171 break;
1172
1173 case COMPONENT_REF:
1174 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1175 DECL_INITIAL. If BASE is a nested reference into another
1176 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1177 the inner reference. */
1178 base = TREE_OPERAND (t, 0);
1179 switch (TREE_CODE (base))
1180 {
1181 case VAR_DECL:
1182 if (!TREE_READONLY (base)
1183 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1184 || !targetm.binds_local_p (base))
1185 return NULL_TREE;
1186
1187 ctor = DECL_INITIAL (base);
1188 break;
1189
1190 case ARRAY_REF:
1191 case COMPONENT_REF:
1192 ctor = fold_const_aggregate_ref (base);
1193 break;
1194
1195 default:
1196 return NULL_TREE;
1197 }
1198
1199 if (ctor == NULL_TREE
1200 || TREE_CODE (ctor) != CONSTRUCTOR
1201 || !TREE_STATIC (ctor))
1202 return NULL_TREE;
1203
1204 field = TREE_OPERAND (t, 1);
1205
c75b4594 1206 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1207 if (cfield == field
8782adcf 1208 /* FIXME: Handle bit-fields. */
c75b4594 1209 && ! DECL_BIT_FIELD (cfield))
590d65aa 1210 {
6872bf3c 1211 STRIP_NOPS (cval);
3a44a278 1212 if (TREE_CODE (cval) == ADDR_EXPR)
1213 {
1214 tree base = get_base_address (TREE_OPERAND (cval, 0));
1215 if (base && TREE_CODE (base) == VAR_DECL)
1216 add_referenced_var (base);
1217 }
590d65aa 1218 return cval;
1219 }
8782adcf 1220 break;
1221
908cb59d 1222 case REALPART_EXPR:
1223 case IMAGPART_EXPR:
1224 {
1225 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1226 if (c && TREE_CODE (c) == COMPLEX_CST)
389dd41b 1227 return fold_build1_loc (EXPR_LOCATION (t),
1228 TREE_CODE (t), TREE_TYPE (t), c);
908cb59d 1229 break;
1230 }
04236c3a 1231
182cf5a9 1232 case MEM_REF:
1233 /* Get the base object we are accessing. */
1234 base = TREE_OPERAND (t, 0);
1235 if (TREE_CODE (base) == SSA_NAME
15d138c9 1236 && (tem = get_constant_value (base)))
1237 base = tem;
182cf5a9 1238 if (TREE_CODE (base) != ADDR_EXPR)
1239 return NULL_TREE;
1240 base = TREE_OPERAND (base, 0);
1241 switch (TREE_CODE (base))
1242 {
1243 case VAR_DECL:
1244 if (DECL_P (base)
1245 && !AGGREGATE_TYPE_P (TREE_TYPE (base))
1246 && integer_zerop (TREE_OPERAND (t, 1)))
dbeb5fe0 1247 {
1248 tree res = get_symbol_constant_value (base);
1249 if (res
1250 && !useless_type_conversion_p
1251 (TREE_TYPE (t), TREE_TYPE (res)))
1252 res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
1253 return res;
1254 }
182cf5a9 1255
1256 if (!TREE_READONLY (base)
1257 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1258 || !targetm.binds_local_p (base))
1259 return NULL_TREE;
1260
1261 ctor = DECL_INITIAL (base);
1262 break;
1263
1264 case STRING_CST:
1265 case CONSTRUCTOR:
1266 ctor = base;
1267 break;
1268
1269 default:
1270 return NULL_TREE;
1271 }
1272
1273 if (ctor == NULL_TREE
1274 || (TREE_CODE (ctor) != CONSTRUCTOR
1275 && TREE_CODE (ctor) != STRING_CST)
1276 || !TREE_STATIC (ctor))
1277 return NULL_TREE;
1278
1279 /* Get the byte offset. */
1280 idx = TREE_OPERAND (t, 1);
1281
1282 /* Fold read from constant string. */
1283 if (TREE_CODE (ctor) == STRING_CST)
1284 {
1285 if ((TYPE_MODE (TREE_TYPE (t))
1286 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1287 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1288 == MODE_INT)
1289 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1290 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1291 return build_int_cst_type (TREE_TYPE (t),
1292 (TREE_STRING_POINTER (ctor)
1293 [TREE_INT_CST_LOW (idx)]));
1294 return NULL_TREE;
1295 }
1296
1297 /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR. */
1298 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
1299 && (TYPE_MODE (TREE_TYPE (t))
1300 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1301 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
1302 && integer_zerop
1303 (int_const_binop
1304 (TRUNC_MOD_EXPR, idx,
1305 size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
1306 {
1307 idx = int_const_binop (TRUNC_DIV_EXPR, idx,
1308 size_int (GET_MODE_SIZE
1309 (TYPE_MODE (TREE_TYPE (t)))), 0);
1310 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1311 if (tree_int_cst_equal (cfield, idx))
1312 {
1313 STRIP_NOPS (cval);
1314 if (TREE_CODE (cval) == ADDR_EXPR)
1315 {
1316 tree base = get_base_address (TREE_OPERAND (cval, 0));
1317 if (base && TREE_CODE (base) == VAR_DECL)
1318 add_referenced_var (base);
1319 }
1320 if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
1321 return cval;
1322 else if (CONSTANT_CLASS_P (cval))
1323 return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
1324 else
1325 return NULL_TREE;
1326 }
1327 }
1328 break;
04236c3a 1329
8782adcf 1330 default:
1331 break;
1332 }
1333
1334 return NULL_TREE;
1335}
75a70cf9 1336
1337/* Evaluate statement STMT.
1338 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1339
88dbf20f 1340static prop_value_t
75a70cf9 1341evaluate_stmt (gimple stmt)
4ee9c684 1342{
88dbf20f 1343 prop_value_t val;
4f61cce6 1344 tree simplified = NULL_TREE;
88dbf20f 1345 ccp_lattice_t likelyvalue = likely_value (stmt);
add6ee5e 1346 bool is_constant;
88dbf20f 1347
add6ee5e 1348 fold_defer_overflow_warnings ();
1349
4ee9c684 1350 /* If the statement is likely to have a CONSTANT result, then try
1351 to fold the statement to determine the constant value. */
75a70cf9 1352 /* FIXME. This is the only place that we call ccp_fold.
1353 Since likely_value never returns CONSTANT for calls, we will
1354 not attempt to fold them, including builtins that may profit. */
4ee9c684 1355 if (likelyvalue == CONSTANT)
1356 simplified = ccp_fold (stmt);
1357 /* If the statement is likely to have a VARYING result, then do not
1358 bother folding the statement. */
04236c3a 1359 else if (likelyvalue == VARYING)
75a70cf9 1360 {
590c3166 1361 enum gimple_code code = gimple_code (stmt);
75a70cf9 1362 if (code == GIMPLE_ASSIGN)
1363 {
1364 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 1365
75a70cf9 1366 /* Other cases cannot satisfy is_gimple_min_invariant
1367 without folding. */
1368 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1369 simplified = gimple_assign_rhs1 (stmt);
1370 }
1371 else if (code == GIMPLE_SWITCH)
1372 simplified = gimple_switch_index (stmt);
1373 else
a65c4d64 1374 /* These cannot satisfy is_gimple_min_invariant without folding. */
1375 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
75a70cf9 1376 }
4ee9c684 1377
add6ee5e 1378 is_constant = simplified && is_gimple_min_invariant (simplified);
1379
1380 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1381
912f109f 1382 if (dump_file && (dump_flags & TDF_DETAILS))
1383 {
1384 fprintf (dump_file, "which is likely ");
1385 switch (likelyvalue)
1386 {
1387 case CONSTANT:
1388 fprintf (dump_file, "CONSTANT");
1389 break;
1390 case UNDEFINED:
1391 fprintf (dump_file, "UNDEFINED");
1392 break;
1393 case VARYING:
1394 fprintf (dump_file, "VARYING");
1395 break;
1396 default:;
1397 }
1398 fprintf (dump_file, "\n");
1399 }
1400
add6ee5e 1401 if (is_constant)
4ee9c684 1402 {
1403 /* The statement produced a constant value. */
1404 val.lattice_val = CONSTANT;
88dbf20f 1405 val.value = simplified;
4ee9c684 1406 }
1407 else
1408 {
1409 /* The statement produced a nonconstant value. If the statement
88dbf20f 1410 had UNDEFINED operands, then the result of the statement
1411 should be UNDEFINED. Otherwise, the statement is VARYING. */
bfa30570 1412 if (likelyvalue == UNDEFINED)
b765fa12 1413 val.lattice_val = likelyvalue;
1414 else
1415 val.lattice_val = VARYING;
1416
88dbf20f 1417 val.value = NULL_TREE;
4ee9c684 1418 }
41511585 1419
1420 return val;
4ee9c684 1421}
1422
6688f8ec 1423/* Fold the stmt at *GSI with CCP-specific information that propagation
 1424 and regular folding do not catch. */
1425
1426static bool
1427ccp_fold_stmt (gimple_stmt_iterator *gsi)
1428{
1429 gimple stmt = gsi_stmt (*gsi);
6688f8ec 1430
94144e68 1431 switch (gimple_code (stmt))
1432 {
1433 case GIMPLE_COND:
1434 {
1435 prop_value_t val;
1436 /* Statement evaluation will handle type mismatches in constants
1437 more gracefully than the final propagation. This allows us to
1438 fold more conditionals here. */
1439 val = evaluate_stmt (stmt);
1440 if (val.lattice_val != CONSTANT
1441 || TREE_CODE (val.value) != INTEGER_CST)
1442 return false;
1443
1444 if (integer_zerop (val.value))
1445 gimple_cond_make_false (stmt);
1446 else
1447 gimple_cond_make_true (stmt);
6688f8ec 1448
94144e68 1449 return true;
1450 }
6688f8ec 1451
94144e68 1452 case GIMPLE_CALL:
1453 {
1454 tree lhs = gimple_call_lhs (stmt);
15d138c9 1455 tree val;
94144e68 1456 tree argt;
1457 bool changed = false;
1458 unsigned i;
1459
1460 /* If the call was folded into a constant make sure it goes
1461 away even if we cannot propagate into all uses because of
1462 type issues. */
1463 if (lhs
1464 && TREE_CODE (lhs) == SSA_NAME
15d138c9 1465 && (val = get_constant_value (lhs)))
94144e68 1466 {
15d138c9 1467 tree new_rhs = unshare_expr (val);
338cce8f 1468 bool res;
94144e68 1469 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1470 TREE_TYPE (new_rhs)))
1471 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 1472 res = update_call_from_tree (gsi, new_rhs);
1473 gcc_assert (res);
94144e68 1474 return true;
1475 }
1476
1477 /* Propagate into the call arguments. Compared to replace_uses_in
1478 this can use the argument slot types for type verification
1479 instead of the current argument type. We also can safely
1480 drop qualifiers here as we are dealing with constants anyway. */
1481 argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
1482 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1483 ++i, argt = TREE_CHAIN (argt))
1484 {
1485 tree arg = gimple_call_arg (stmt, i);
1486 if (TREE_CODE (arg) == SSA_NAME
15d138c9 1487 && (val = get_constant_value (arg))
94144e68 1488 && useless_type_conversion_p
1489 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 1490 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 1491 {
15d138c9 1492 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 1493 changed = true;
1494 }
1495 }
1496
1497 return changed;
1498 }
6688f8ec 1499
6872bf3c 1500 case GIMPLE_ASSIGN:
1501 {
1502 tree lhs = gimple_assign_lhs (stmt);
15d138c9 1503 tree val;
6872bf3c 1504
1505 /* If we have a load that turned out to be constant replace it
1506 as we cannot propagate into all uses in all cases. */
1507 if (gimple_assign_single_p (stmt)
1508 && TREE_CODE (lhs) == SSA_NAME
15d138c9 1509 && (val = get_constant_value (lhs)))
6872bf3c 1510 {
15d138c9 1511 tree rhs = unshare_expr (val);
6872bf3c 1512 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 1513 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 1514 gimple_assign_set_rhs_from_tree (gsi, rhs);
1515 return true;
1516 }
1517
1518 return false;
1519 }
1520
94144e68 1521 default:
1522 return false;
1523 }
6688f8ec 1524}
1525
41511585 1526/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 1527 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1528 creates virtual definitions, set the value of each new name to that
75a70cf9 1529 of the RHS (if we can derive a constant out of the RHS).
1530 Value-returning call statements also perform an assignment, and
1531 are handled here. */
4ee9c684 1532
41511585 1533static enum ssa_prop_result
75a70cf9 1534visit_assignment (gimple stmt, tree *output_p)
4ee9c684 1535{
88dbf20f 1536 prop_value_t val;
88dbf20f 1537 enum ssa_prop_result retval;
4ee9c684 1538
75a70cf9 1539 tree lhs = gimple_get_lhs (stmt);
4ee9c684 1540
75a70cf9 1541 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1542 || gimple_call_lhs (stmt) != NULL_TREE);
1543
15d138c9 1544 if (gimple_assign_single_p (stmt)
1545 && gimple_assign_rhs_code (stmt) == SSA_NAME)
1546 /* For a simple copy operation, we copy the lattice values. */
1547 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 1548 else
75a70cf9 1549 /* Evaluate the statement, which could be
1550 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 1551 val = evaluate_stmt (stmt);
4ee9c684 1552
88dbf20f 1553 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 1554
41511585 1555 /* Set the lattice value of the statement's output. */
88dbf20f 1556 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 1557 {
88dbf20f 1558 /* If STMT is an assignment to an SSA_NAME, we only have one
1559 value to set. */
1560 if (set_lattice_value (lhs, val))
1561 {
1562 *output_p = lhs;
1563 if (val.lattice_val == VARYING)
1564 retval = SSA_PROP_VARYING;
1565 else
1566 retval = SSA_PROP_INTERESTING;
1567 }
4ee9c684 1568 }
88dbf20f 1569
1570 return retval;
4ee9c684 1571}
1572
4ee9c684 1573
41511585 1574/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1575 if it can determine which edge will be taken. Otherwise, return
1576 SSA_PROP_VARYING. */
1577
1578static enum ssa_prop_result
75a70cf9 1579visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 1580{
88dbf20f 1581 prop_value_t val;
41511585 1582 basic_block block;
1583
75a70cf9 1584 block = gimple_bb (stmt);
41511585 1585 val = evaluate_stmt (stmt);
1586
1587 /* Find which edge out of the conditional block will be taken and add it
1588 to the worklist. If no single edge can be determined statically,
1589 return SSA_PROP_VARYING to feed all the outgoing edges to the
1590 propagation engine. */
88dbf20f 1591 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
41511585 1592 if (*taken_edge_p)
1593 return SSA_PROP_INTERESTING;
1594 else
1595 return SSA_PROP_VARYING;
4ee9c684 1596}
1597
4ee9c684 1598
41511585 1599/* Evaluate statement STMT. If the statement produces an output value and
1600 its evaluation changes the lattice value of its output, return
1601 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1602 output value.
48e1416a 1603
41511585 1604 If STMT is a conditional branch and we can determine its truth
1605 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1606 value, return SSA_PROP_VARYING. */
4ee9c684 1607
41511585 1608static enum ssa_prop_result
75a70cf9 1609ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 1610{
41511585 1611 tree def;
1612 ssa_op_iter iter;
4ee9c684 1613
41511585 1614 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 1615 {
88dbf20f 1616 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 1617 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 1618 }
4ee9c684 1619
75a70cf9 1620 switch (gimple_code (stmt))
4ee9c684 1621 {
75a70cf9 1622 case GIMPLE_ASSIGN:
1623 /* If the statement is an assignment that produces a single
1624 output value, evaluate its RHS to see if the lattice value of
1625 its output has changed. */
1626 return visit_assignment (stmt, output_p);
1627
1628 case GIMPLE_CALL:
1629 /* A value-returning call also performs an assignment. */
1630 if (gimple_call_lhs (stmt) != NULL_TREE)
1631 return visit_assignment (stmt, output_p);
1632 break;
1633
1634 case GIMPLE_COND:
1635 case GIMPLE_SWITCH:
1636 /* If STMT is a conditional branch, see if we can determine
1637 which branch will be taken. */
1638 /* FIXME. It appears that we should be able to optimize
1639 computed GOTOs here as well. */
1640 return visit_cond_stmt (stmt, taken_edge_p);
1641
1642 default:
1643 break;
4ee9c684 1644 }
4ee9c684 1645
41511585 1646 /* Any other kind of statement is not interesting for constant
1647 propagation and, therefore, not worth simulating. */
41511585 1648 if (dump_file && (dump_flags & TDF_DETAILS))
1649 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 1650
41511585 1651 /* Definitions made by statements other than assignments to
1652 SSA_NAMEs represent unknown modifications to their outputs.
1653 Mark them VARYING. */
88dbf20f 1654 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1655 {
61207d43 1656 prop_value_t v = { VARYING, NULL_TREE };
88dbf20f 1657 set_lattice_value (def, v);
1658 }
4ee9c684 1659
41511585 1660 return SSA_PROP_VARYING;
1661}
4ee9c684 1662
4ee9c684 1663
88dbf20f 1664/* Main entry point for SSA Conditional Constant Propagation. */
41511585 1665
33a34f1e 1666static unsigned int
61207d43 1667do_ssa_ccp (void)
41511585 1668{
1669 ccp_initialize ();
1670 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 1671 if (ccp_finalize ())
eb9161e7 1672 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
33a34f1e 1673 else
1674 return 0;
4ee9c684 1675}
1676
5664499b 1677
1678static bool
41511585 1679gate_ccp (void)
5664499b 1680{
41511585 1681 return flag_tree_ccp != 0;
5664499b 1682}
1683
4ee9c684 1684
48e1416a 1685struct gimple_opt_pass pass_ccp =
41511585 1686{
20099e35 1687 {
1688 GIMPLE_PASS,
41511585 1689 "ccp", /* name */
1690 gate_ccp, /* gate */
88dbf20f 1691 do_ssa_ccp, /* execute */
41511585 1692 NULL, /* sub */
1693 NULL, /* next */
1694 0, /* static_pass_number */
1695 TV_TREE_CCP, /* tv_id */
49290934 1696 PROP_cfg | PROP_ssa, /* properties_required */
41511585 1697 0, /* properties_provided */
b6246c40 1698 0, /* properties_destroyed */
41511585 1699 0, /* todo_flags_start */
33a34f1e 1700 TODO_dump_func | TODO_verify_ssa
20099e35 1701 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1702 }
41511585 1703};
4ee9c684 1704
4ee9c684 1705
75a70cf9 1706
bdd0e199 1707/* Try to optimize out __builtin_stack_restore. Optimize it out
1708 if there is another __builtin_stack_restore in the same basic
1709 block and no calls or ASM_EXPRs are in between, or if this block's
1710 only outgoing edge is to EXIT_BLOCK and there are no calls or
1711 ASM_EXPRs after this __builtin_stack_restore. */
1712
1713static tree
75a70cf9 1714optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 1715{
6ea999da 1716 tree callee;
1717 gimple stmt;
75a70cf9 1718
1719 basic_block bb = gsi_bb (i);
1720 gimple call = gsi_stmt (i);
bdd0e199 1721
75a70cf9 1722 if (gimple_code (call) != GIMPLE_CALL
1723 || gimple_call_num_args (call) != 1
1724 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
1725 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 1726 return NULL_TREE;
1727
75a70cf9 1728 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 1729 {
75a70cf9 1730 stmt = gsi_stmt (i);
1731 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 1732 return NULL_TREE;
75a70cf9 1733 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 1734 continue;
1735
75a70cf9 1736 callee = gimple_call_fndecl (stmt);
c40a6f90 1737 if (!callee
1738 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1739 /* All regular builtins are ok, just obviously not alloca. */
1740 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
bdd0e199 1741 return NULL_TREE;
1742
1743 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 1744 goto second_stack_restore;
bdd0e199 1745 }
1746
6ea999da 1747 if (!gsi_end_p (i))
bdd0e199 1748 return NULL_TREE;
1749
6ea999da 1750  /* Allow a single edge to the exit block, or no successors at all.  */
1751 switch (EDGE_COUNT (bb->succs))
1752 {
1753 case 0:
1754 break;
1755 case 1:
1756 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
1757 return NULL_TREE;
1758 break;
1759 default:
1760 return NULL_TREE;
1761 }
1762 second_stack_restore:
bdd0e199 1763
6ea999da 1764  /* If there's exactly one use, then zap the call to __builtin_stack_save.
 1765     If there are multiple uses, the call should go away once the last of
 1766     those uses is removed.  In any case, whether __builtin_stack_save can
 1767     be removed or not is irrelevant to removing this __builtin_stack_restore.  */
1768 if (has_single_use (gimple_call_arg (call, 0)))
1769 {
1770 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
1771 if (is_gimple_call (stack_save))
1772 {
1773 callee = gimple_call_fndecl (stack_save);
1774 if (callee
1775 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1776 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
1777 {
1778 gimple_stmt_iterator stack_save_gsi;
1779 tree rhs;
bdd0e199 1780
6ea999da 1781 stack_save_gsi = gsi_for_stmt (stack_save);
1782 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
1783 update_call_from_tree (&stack_save_gsi, rhs);
1784 }
1785 }
1786 }
bdd0e199 1787
75a70cf9 1788 /* No effect, so the statement will be deleted. */
bdd0e199 1789 return integer_zero_node;
1790}
75a70cf9 1791
8a58ed0a 1792/* If the va_list type is a simple pointer and nothing special is needed,
 1793   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 1794   remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
 1795   into a simple pointer assignment.  */
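/* For example (an illustrative sketch for a target whose va_list is a
   plain character pointer; aq is a second, hypothetical va_list):

     __builtin_va_start (&ap, 0)    becomes   ap = __builtin_next_arg (0)
     __builtin_va_copy (&aq, ap)    becomes   aq = ap
     __builtin_va_end (&ap)         is deleted as having no effect.  */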
1796
1797static tree
75a70cf9 1798optimize_stdarg_builtin (gimple call)
8a58ed0a 1799{
5f57a8b1 1800 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 1801 bool va_list_simple_ptr;
389dd41b 1802 location_t loc = gimple_location (call);
8a58ed0a 1803
75a70cf9 1804 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 1805 return NULL_TREE;
1806
75a70cf9 1807 callee = gimple_call_fndecl (call);
5f57a8b1 1808
1809 cfun_va_list = targetm.fn_abi_va_list (callee);
1810 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
1811 && (TREE_TYPE (cfun_va_list) == void_type_node
1812 || TREE_TYPE (cfun_va_list) == char_type_node);
1813
8a58ed0a 1814 switch (DECL_FUNCTION_CODE (callee))
1815 {
1816 case BUILT_IN_VA_START:
1817 if (!va_list_simple_ptr
1818 || targetm.expand_builtin_va_start != NULL
75a70cf9 1819 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
8a58ed0a 1820 return NULL_TREE;
1821
75a70cf9 1822 if (gimple_call_num_args (call) != 2)
8a58ed0a 1823 return NULL_TREE;
1824
75a70cf9 1825 lhs = gimple_call_arg (call, 0);
8a58ed0a 1826 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
1827 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 1828 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 1829 return NULL_TREE;
48e1416a 1830
389dd41b 1831 lhs = build_fold_indirect_ref_loc (loc, lhs);
1832 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
75a70cf9 1833 1, integer_zero_node);
389dd41b 1834 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 1835 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1836
1837 case BUILT_IN_VA_COPY:
1838 if (!va_list_simple_ptr)
1839 return NULL_TREE;
1840
75a70cf9 1841 if (gimple_call_num_args (call) != 2)
8a58ed0a 1842 return NULL_TREE;
1843
75a70cf9 1844 lhs = gimple_call_arg (call, 0);
8a58ed0a 1845 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
1846 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 1847 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 1848 return NULL_TREE;
1849
389dd41b 1850 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 1851 rhs = gimple_call_arg (call, 1);
8a58ed0a 1852 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 1853 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 1854 return NULL_TREE;
1855
389dd41b 1856 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 1857 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1858
1859 case BUILT_IN_VA_END:
75a70cf9 1860 /* No effect, so the statement will be deleted. */
8a58ed0a 1861 return integer_zero_node;
1862
1863 default:
1864 gcc_unreachable ();
1865 }
1866}
75a70cf9 1867
4ee9c684 1868/* A simple pass that attempts to fold all builtin functions. This pass
1869 is run after we've propagated as many constants as we can. */
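/* For example (an illustrative sketch; t and expr are placeholders):
   a leftover

     t = __builtin_constant_p (expr);

   whose argument never became constant is resolved to t = 0 below, and
   redundant __builtin_stack_restore and __builtin_va_* calls are
   simplified by the helper functions above.  */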
1870
2a1990e9 1871static unsigned int
4ee9c684 1872execute_fold_all_builtins (void)
1873{
b36237eb 1874 bool cfg_changed = false;
4ee9c684 1875 basic_block bb;
b1b7c0c4 1876 unsigned int todoflags = 0;
48e1416a 1877
4ee9c684 1878 FOR_EACH_BB (bb)
1879 {
75a70cf9 1880 gimple_stmt_iterator i;
1881 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 1882 {
75a70cf9 1883 gimple stmt, old_stmt;
4ee9c684 1884 tree callee, result;
0a39fd54 1885 enum built_in_function fcode;
4ee9c684 1886
75a70cf9 1887 stmt = gsi_stmt (i);
1888
1889 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 1890 {
75a70cf9 1891 gsi_next (&i);
0a39fd54 1892 continue;
1893 }
75a70cf9 1894 callee = gimple_call_fndecl (stmt);
4ee9c684 1895 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 1896 {
75a70cf9 1897 gsi_next (&i);
0a39fd54 1898 continue;
1899 }
1900 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 1901
2d18b16d 1902 result = gimple_fold_builtin (stmt);
5a4b7e1e 1903
1904 if (result)
75a70cf9 1905 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 1906
4ee9c684 1907 if (!result)
1908 switch (DECL_FUNCTION_CODE (callee))
1909 {
1910 case BUILT_IN_CONSTANT_P:
1911 /* Resolve __builtin_constant_p. If it hasn't been
1912 folded to integer_one_node by now, it's fairly
1913 certain that the value simply isn't constant. */
75a70cf9 1914 result = integer_zero_node;
4ee9c684 1915 break;
1916
bdd0e199 1917 case BUILT_IN_STACK_RESTORE:
75a70cf9 1918 result = optimize_stack_restore (i);
8a58ed0a 1919 if (result)
1920 break;
75a70cf9 1921 gsi_next (&i);
8a58ed0a 1922 continue;
1923
1924 case BUILT_IN_VA_START:
1925 case BUILT_IN_VA_END:
1926 case BUILT_IN_VA_COPY:
1927 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 1928 result = optimize_stdarg_builtin (stmt);
bdd0e199 1929 if (result)
1930 break;
1931 /* FALLTHRU */
1932
4ee9c684 1933 default:
75a70cf9 1934 gsi_next (&i);
4ee9c684 1935 continue;
1936 }
1937
1938 if (dump_file && (dump_flags & TDF_DETAILS))
1939 {
1940 fprintf (dump_file, "Simplified\n ");
75a70cf9 1941 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 1942 }
1943
75a70cf9 1944 old_stmt = stmt;
75a70cf9 1945 if (!update_call_from_tree (&i, result))
0fefde02 1946 {
1947 gimplify_and_update_call_from_tree (&i, result);
1948 todoflags |= TODO_update_address_taken;
1949 }
de6ed584 1950
75a70cf9 1951 stmt = gsi_stmt (i);
4c5fd53c 1952 update_stmt (stmt);
de6ed584 1953
75a70cf9 1954 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
1955 && gimple_purge_dead_eh_edges (bb))
b36237eb 1956 cfg_changed = true;
4ee9c684 1957
1958 if (dump_file && (dump_flags & TDF_DETAILS))
1959 {
1960 fprintf (dump_file, "to\n ");
75a70cf9 1961 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 1962 fprintf (dump_file, "\n");
1963 }
0a39fd54 1964
 1965	  /* Retry the same statement if it changed into another
 1966	     builtin; there might be new opportunities now.  */
75a70cf9 1967 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 1968 {
75a70cf9 1969 gsi_next (&i);
0a39fd54 1970 continue;
1971 }
75a70cf9 1972 callee = gimple_call_fndecl (stmt);
0a39fd54 1973 if (!callee
75a70cf9 1974 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 1975 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 1976 gsi_next (&i);
4ee9c684 1977 }
1978 }
48e1416a 1979
b36237eb 1980 /* Delete unreachable blocks. */
b1b7c0c4 1981 if (cfg_changed)
1982 todoflags |= TODO_cleanup_cfg;
48e1416a 1983
b1b7c0c4 1984 return todoflags;
4ee9c684 1985}
1986
41511585 1987
48e1416a 1988struct gimple_opt_pass pass_fold_builtins =
4ee9c684 1989{
20099e35 1990 {
1991 GIMPLE_PASS,
4ee9c684 1992 "fab", /* name */
1993 NULL, /* gate */
1994 execute_fold_all_builtins, /* execute */
1995 NULL, /* sub */
1996 NULL, /* next */
1997 0, /* static_pass_number */
0b1615c1 1998 TV_NONE, /* tv_id */
49290934 1999 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 2000 0, /* properties_provided */
2001 0, /* properties_destroyed */
2002 0, /* todo_flags_start */
909e5ecb 2003 TODO_dump_func
2004 | TODO_verify_ssa
20099e35 2005 | TODO_update_ssa /* todo_flags_finish */
2006 }
4ee9c684 2007};