]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-ccp.c
* config/arm/arm.c (arm_pcs_default): Remove static.
[thirdparty/gcc.git] / gcc / tree-ssa-ccp.c
CommitLineData
4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
87c0a9fc 2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4ee9c684 4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6
7This file is part of GCC.
48e1416a 8
4ee9c684 9GCC is free software; you can redistribute it and/or modify it
10under the terms of the GNU General Public License as published by the
8c4c00c1 11Free Software Foundation; either version 3, or (at your option) any
4ee9c684 12later version.
48e1416a 13
4ee9c684 14GCC is distributed in the hope that it will be useful, but WITHOUT
15ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
48e1416a 18
4ee9c684 19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
4ee9c684 22
88dbf20f 23/* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
30
bfa30570 31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
88dbf20f 38
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
43
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
46
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
50
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
61
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
48e1416a 66
88dbf20f 67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
73
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
76
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
82
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of consider it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
87
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
95
96
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
101
102
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
105
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
112
113 For instance, consider the following code fragment:
114
115 struct A a;
116 const int B = 42;
117
118 void foo (int i)
119 {
120 if (i > 10)
121 a.a = 42;
122 else
123 {
124 a.b = 21;
125 a.a = a.b + 21;
126 }
127
128 if (a.a != B)
129 never_executed ();
130 }
131
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
4fb5e5ca 134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
88dbf20f 138
4fb5e5ca 139 # a_5 = VDEF <a_4>
88dbf20f 140 a.a = 2;
141
142 # VUSE <a_5>
143 x_3 = a.b;
144
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
148
bfa30570 149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
88dbf20f 151
152 int A;
153
154 foo (int i)
155 {
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
159
160 # VUSE <A_5>
161 A.0_6 = A;
162
163 return A.0_6;
164 }
165
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
bfa30570 168 would erroneously optimize the above into 'return 3;'.
88dbf20f 169
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
4ee9c684 176
177 References:
178
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
181
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
184
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
187
188#include "config.h"
189#include "system.h"
190#include "coretypes.h"
191#include "tm.h"
4ee9c684 192#include "tree.h"
41511585 193#include "flags.h"
4ee9c684 194#include "tm_p.h"
4ee9c684 195#include "basic-block.h"
41511585 196#include "output.h"
41511585 197#include "function.h"
ce084dfc 198#include "tree-pretty-print.h"
199#include "gimple-pretty-print.h"
41511585 200#include "timevar.h"
4ee9c684 201#include "tree-dump.h"
41511585 202#include "tree-flow.h"
4ee9c684 203#include "tree-pass.h"
41511585 204#include "tree-ssa-propagate.h"
5a4b7e1e 205#include "value-prof.h"
41511585 206#include "langhooks.h"
8782adcf 207#include "target.h"
0b205f4c 208#include "diagnostic-core.h"
add6ee5e 209#include "toplev.h"
43fb76c1 210#include "dbgcnt.h"
4ee9c684 211
212
/* Possible lattice values, ordered so that later values are "lower"
   in the lattice (transitions only move from smaller to larger).  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Forward declarations for helpers used before their definitions.  */
static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
4af351a8 232
88dbf20f 233/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 234
235static void
88dbf20f 236dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
01406fc0 237{
41511585 238 switch (val.lattice_val)
01406fc0 239 {
88dbf20f 240 case UNINITIALIZED:
241 fprintf (outf, "%sUNINITIALIZED", prefix);
242 break;
41511585 243 case UNDEFINED:
244 fprintf (outf, "%sUNDEFINED", prefix);
245 break;
246 case VARYING:
247 fprintf (outf, "%sVARYING", prefix);
248 break;
41511585 249 case CONSTANT:
250 fprintf (outf, "%sCONSTANT ", prefix);
88dbf20f 251 print_generic_expr (outf, val.value, dump_flags);
41511585 252 break;
253 default:
8c0963c4 254 gcc_unreachable ();
41511585 255 }
01406fc0 256}
4ee9c684 257
4ee9c684 258
88dbf20f 259/* Print lattice value VAL to stderr. */
260
261void debug_lattice_value (prop_value_t val);
262
4b987fac 263DEBUG_FUNCTION void
88dbf20f 264debug_lattice_value (prop_value_t val)
265{
266 dump_lattice_value (stderr, "", val);
267 fprintf (stderr, "\n");
268}
4ee9c684 269
4ee9c684 270
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      /* A single-rhs copy from a declaration with a known constant
	 initializer starts out CONSTANT (rule 1 above).  */
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
4ee9c684 338
4ee9c684 339
bfa30570 340/* Get the constant value associated with variable VAR. */
4ee9c684 341
bfa30570 342static inline prop_value_t *
343get_value (tree var)
88dbf20f 344{
e004838d 345 prop_value_t *val;
bfa30570 346
e004838d 347 if (const_val == NULL)
348 return NULL;
349
350 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 351 if (val->lattice_val == UNINITIALIZED)
4ee9c684 352 *val = get_default_value (var);
353
4af351a8 354 canonicalize_float_value (val);
355
4ee9c684 356 return val;
357}
358
bfa30570 359/* Sets the value associated with VAR to VARYING. */
360
361static inline void
362set_value_varying (tree var)
363{
364 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
365
366 val->lattice_val = VARYING;
367 val->value = NULL_TREE;
bfa30570 368}
4ee9c684 369
b31eb493 370/* For float types, modify the value of VAL to make ccp work correctly
371 for non-standard values (-0, NaN):
372
373 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
374 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
375 This is to fix the following problem (see PR 29921): Suppose we have
376
377 x = 0.0 * y
378
379 and we set value of y to NaN. This causes value of x to be set to NaN.
380 When we later determine that y is in fact VARYING, fold uses the fact
381 that HONOR_NANS is false, and we try to change the value of x to 0,
382 causing an ICE. With HONOR_NANS being false, the real appearance of
383 NaN would cause undefined behavior, though, so claiming that y (and x)
384 are UNDEFINED initially is correct. */
385
386static void
387canonicalize_float_value (prop_value_t *val)
388{
389 enum machine_mode mode;
390 tree type;
391 REAL_VALUE_TYPE d;
392
393 if (val->lattice_val != CONSTANT
394 || TREE_CODE (val->value) != REAL_CST)
395 return;
396
397 d = TREE_REAL_CST (val->value);
398 type = TREE_TYPE (val->value);
399 mode = TYPE_MODE (type);
400
401 if (!HONOR_SIGNED_ZEROS (mode)
402 && REAL_VALUE_MINUS_ZERO (d))
403 {
404 val->value = build_real (type, dconst0);
405 return;
406 }
407
408 if (!HONOR_NANS (mode)
409 && REAL_VALUE_ISNAN (d))
410 {
411 val->lattice_val = UNDEFINED;
412 val->value = NULL;
b31eb493 413 return;
414 }
415}
416
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  /* Normalize -0.0/NaN first so that equal values compare equal below.  */
  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ". Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      /* A genuine transition can never land on UNDEFINED: values only
	 move down the lattice (UNDEFINED -> CONSTANT -> VARYING).  */
      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
452
453
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  Classify every SSA use by its
     current lattice state.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* An invariant operand means not all operands were UNDEFINED.  */
  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	/* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	   Not bitwise operators, one VARYING operand may specify the
	   result completely.  Not logical operators for the same reason.
	   Not COMPLEX_EXPR as one VARYING operand makes the result partly
	   not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	   the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
559
bfa30570 560/* Returns true if STMT cannot be constant. */
561
562static bool
75a70cf9 563surely_varying_stmt_p (gimple stmt)
bfa30570 564{
565 /* If the statement has operands that we cannot handle, it cannot be
566 constant. */
75a70cf9 567 if (gimple_has_volatile_ops (stmt))
bfa30570 568 return true;
569
f257af64 570 /* If it is a call and does not return a value or is not a
571 builtin and not an indirect call, it is varying. */
75a70cf9 572 if (is_gimple_call (stmt))
f257af64 573 {
574 tree fndecl;
575 if (!gimple_call_lhs (stmt)
576 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5768aeb3 577 && !DECL_BUILT_IN (fndecl)))
f257af64 578 return true;
579 }
bfa30570 580
8edeb88b 581 /* Any other store operation is not interesting. */
dd277d48 582 else if (gimple_vdef (stmt))
8edeb88b 583 return true;
584
bfa30570 585 /* Anything other than assignments and conditional jumps are not
586 interesting for CCP. */
75a70cf9 587 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 588 && gimple_code (stmt) != GIMPLE_COND
589 && gimple_code (stmt) != GIMPLE_SWITCH
590 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 591 return true;
592
593 return false;
594}
4ee9c684 595
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  /* XCNEWVEC zero-fills, so every entry starts out UNINITIALIZED
     (value 0 of ccp_lattice_t).  */
  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  /* Virtual-operand PHIs are never simulated by plain CCP.  */
	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
4ee9c684 655
43fb76c1 656/* Debug count support. Reset the values of ssa names
657 VARYING when the total number ssa names analyzed is
658 beyond the debug count specified. */
659
660static void
661do_dbg_cnt (void)
662{
663 unsigned i;
664 for (i = 0; i < num_ssa_names; i++)
665 {
666 if (!dbg_cnt (ccp))
667 {
668 const_val[i].lattice_val = VARYING;
669 const_val[i].value = NULL_TREE;
670 }
671 }
672}
673
4ee9c684 674
88dbf20f 675/* Do final substitution of propagated values, cleanup the flowgraph and
48e1416a 676 free allocated storage.
4ee9c684 677
33a34f1e 678 Return TRUE when something was optimized. */
679
680static bool
88dbf20f 681ccp_finalize (void)
4ee9c684 682{
43fb76c1 683 bool something_changed;
684
685 do_dbg_cnt ();
88dbf20f 686 /* Perform substitutions based on the known constant values. */
33fd90fa 687 something_changed = substitute_and_fold (const_val, ccp_fold_stmt, true);
4ee9c684 688
88dbf20f 689 free (const_val);
e004838d 690 const_val = NULL;
33a34f1e 691 return something_changed;;
4ee9c684 692}
693
694
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

   any M UNDEFINED = any
   any M VARYING = VARYING
   Ci M Cj = Ci if (i == j)
   Ci M Cj = VARYING if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  /* NOTE: the order of the tests below encodes the meet semantics;
     UNDEFINED must be checked before VARYING.  */
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.
	 Nothing to do.  VAL1 already contains the value we want.  */
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
743
744
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      /* Already at the lattice bottom; no argument can change that.  */
      return SSA_PROP_VARYING;

    case CONSTANT:
      /* Seed the meet with the current constant so a conflicting
	 argument demotes it to VARYING below.  */
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, Compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      /* Invariant arguments have no lattice slot; wrap them
		 in a temporary CONSTANT value.  */
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  /* VARYING is absorbing for the meet; stop early.  */
	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
842
00f4f705 843/* Get operand number OPNR from the rhs of STMT. Before returning it,
844 simplify it to a constant if possible. */
845
846static tree
847get_rhs_assign_op_for_ccp (gimple stmt, int opnr)
848{
849 tree op = gimple_op (stmt, opnr);
850
851 if (TREE_CODE (op) == SSA_NAME)
852 {
853 prop_value_t *val = get_value (op);
854 if (val->lattice_val == CONSTANT)
855 op = get_value (op)->value;
856 }
857 return op;
858}
859
41511585 860/* CCP specific front-end to the non-destructive constant folding
861 routines.
4ee9c684 862
863 Attempt to simplify the RHS of STMT knowing that one or more
864 operands are constants.
865
866 If simplification is possible, return the simplified RHS,
75a70cf9 867 otherwise return the original RHS or NULL_TREE. */
4ee9c684 868
869static tree
75a70cf9 870ccp_fold (gimple stmt)
4ee9c684 871{
389dd41b 872 location_t loc = gimple_location (stmt);
75a70cf9 873 switch (gimple_code (stmt))
88dbf20f 874 {
75a70cf9 875 case GIMPLE_ASSIGN:
876 {
877 enum tree_code subcode = gimple_assign_rhs_code (stmt);
878
879 switch (get_gimple_rhs_class (subcode))
880 {
881 case GIMPLE_SINGLE_RHS:
882 {
883 tree rhs = gimple_assign_rhs1 (stmt);
884 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
885
886 if (TREE_CODE (rhs) == SSA_NAME)
887 {
888 /* If the RHS is an SSA_NAME, return its known constant value,
889 if any. */
890 return get_value (rhs)->value;
891 }
892 /* Handle propagating invariant addresses into address operations.
893 The folding we do here matches that in tree-ssa-forwprop.c. */
894 else if (TREE_CODE (rhs) == ADDR_EXPR)
895 {
896 tree *base;
897 base = &TREE_OPERAND (rhs, 0);
898 while (handled_component_p (*base))
899 base = &TREE_OPERAND (*base, 0);
182cf5a9 900 if (TREE_CODE (*base) == MEM_REF
75a70cf9 901 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
902 {
903 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
904 if (val->lattice_val == CONSTANT
182cf5a9 905 && TREE_CODE (val->value) == ADDR_EXPR)
75a70cf9 906 {
182cf5a9 907 tree ret, save = *base;
908 tree new_base;
909 new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
910 unshare_expr (val->value),
911 TREE_OPERAND (*base, 1));
75a70cf9 912 /* We need to return a new tree, not modify the IL
913 or share parts of it. So play some tricks to
914 avoid manually building it. */
182cf5a9 915 *base = new_base;
75a70cf9 916 ret = unshare_expr (rhs);
917 recompute_tree_invariant_for_addr_expr (ret);
918 *base = save;
919 return ret;
920 }
921 }
922 }
388a0bc7 923 else if (TREE_CODE (rhs) == CONSTRUCTOR
924 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
925 && (CONSTRUCTOR_NELTS (rhs)
926 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
927 {
928 unsigned i;
929 tree val, list;
930
931 list = NULL_TREE;
932 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
933 {
934 if (TREE_CODE (val) == SSA_NAME
935 && get_value (val)->lattice_val == CONSTANT)
936 val = get_value (val)->value;
937 if (TREE_CODE (val) == INTEGER_CST
938 || TREE_CODE (val) == REAL_CST
939 || TREE_CODE (val) == FIXED_CST)
940 list = tree_cons (NULL_TREE, val, list);
941 else
942 return NULL_TREE;
943 }
944
945 return build_vector (TREE_TYPE (rhs), nreverse (list));
946 }
4ee9c684 947
75a70cf9 948 if (kind == tcc_reference)
3e4be816 949 {
0fefde02 950 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
951 || TREE_CODE (rhs) == REALPART_EXPR
952 || TREE_CODE (rhs) == IMAGPART_EXPR)
3e4be816 953 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
954 {
955 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
956 if (val->lattice_val == CONSTANT)
389dd41b 957 return fold_unary_loc (EXPR_LOCATION (rhs),
958 TREE_CODE (rhs),
3e4be816 959 TREE_TYPE (rhs), val->value);
960 }
182cf5a9 961 else if (TREE_CODE (rhs) == MEM_REF
8edeb88b 962 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
963 {
964 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
965 if (val->lattice_val == CONSTANT
182cf5a9 966 && TREE_CODE (val->value) == ADDR_EXPR)
967 {
968 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
969 unshare_expr (val->value),
970 TREE_OPERAND (rhs, 1));
971 if (tem)
972 rhs = tem;
973 }
8edeb88b 974 }
3e4be816 975 return fold_const_aggregate_ref (rhs);
976 }
75a70cf9 977 else if (kind == tcc_declaration)
978 return get_symbol_constant_value (rhs);
979 return rhs;
980 }
48e1416a 981
75a70cf9 982 case GIMPLE_UNARY_RHS:
983 {
984 /* Handle unary operators that can appear in GIMPLE form.
985 Note that we know the single operand must be a constant,
986 so this should almost always return a simplified RHS. */
987 tree lhs = gimple_assign_lhs (stmt);
00f4f705 988 tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
75a70cf9 989
990 /* Conversions are useless for CCP purposes if they are
991 value-preserving. Thus the restrictions that
992 useless_type_conversion_p places for pointer type conversions
993 do not apply here. Substitution later will only substitute to
994 allowed places. */
d9659041 995 if (CONVERT_EXPR_CODE_P (subcode)
5768aeb3 996 && POINTER_TYPE_P (TREE_TYPE (lhs))
182cf5a9 997 && POINTER_TYPE_P (TREE_TYPE (op0)))
5768aeb3 998 {
999 tree tem;
182cf5a9 1000 /* Try to re-construct array references on-the-fly. */
5768aeb3 1001 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1002 TREE_TYPE (op0))
1003 && ((tem = maybe_fold_offset_to_address
389dd41b 1004 (loc,
e60a6f7b 1005 op0, integer_zero_node, TREE_TYPE (lhs)))
5768aeb3 1006 != NULL_TREE))
1007 return tem;
1008 return op0;
1009 }
75a70cf9 1010
48e1416a 1011 return
389dd41b 1012 fold_unary_ignore_overflow_loc (loc, subcode,
1013 gimple_expr_type (stmt), op0);
f1fb2997 1014 }
75a70cf9 1015
1016 case GIMPLE_BINARY_RHS:
1017 {
1018 /* Handle binary operators that can appear in GIMPLE form. */
00f4f705 1019 tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
1020 tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
75a70cf9 1021
182cf5a9 1022 /* Translate &x + CST into an invariant form suitable for
1023 further propagation. */
5768aeb3 1024 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1025 && TREE_CODE (op0) == ADDR_EXPR
1026 && TREE_CODE (op1) == INTEGER_CST)
1027 {
182cf5a9 1028 tree off = fold_convert (ptr_type_node, op1);
1029 return build_fold_addr_expr
1030 (fold_build2 (MEM_REF,
1031 TREE_TYPE (TREE_TYPE (op0)),
1032 unshare_expr (op0), off));
5768aeb3 1033 }
1034
389dd41b 1035 return fold_binary_loc (loc, subcode,
182cf5a9 1036 gimple_expr_type (stmt), op0, op1);
75a70cf9 1037 }
1038
00f4f705 1039 case GIMPLE_TERNARY_RHS:
1040 {
1041 /* Handle binary operators that can appear in GIMPLE form. */
1042 tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
1043 tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
1044 tree op2 = get_rhs_assign_op_for_ccp (stmt, 3);
1045
1046 return fold_ternary_loc (loc, subcode,
1047 gimple_expr_type (stmt), op0, op1, op2);
1048 }
1049
75a70cf9 1050 default:
1051 gcc_unreachable ();
1052 }
1053 }
1054 break;
4ee9c684 1055
75a70cf9 1056 case GIMPLE_CALL:
f257af64 1057 {
1058 tree fn = gimple_call_fn (stmt);
1059 prop_value_t *val;
1060
1061 if (TREE_CODE (fn) == SSA_NAME)
1062 {
1063 val = get_value (fn);
1064 if (val->lattice_val == CONSTANT)
1065 fn = val->value;
1066 }
1067 if (TREE_CODE (fn) == ADDR_EXPR
8ef4f124 1068 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
f257af64 1069 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1070 {
1071 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1072 tree call, retval;
1073 unsigned i;
1074 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1075 {
1076 args[i] = gimple_call_arg (stmt, i);
1077 if (TREE_CODE (args[i]) == SSA_NAME)
1078 {
1079 val = get_value (args[i]);
1080 if (val->lattice_val == CONSTANT)
1081 args[i] = val->value;
1082 }
1083 }
389dd41b 1084 call = build_call_array_loc (loc,
1085 gimple_call_return_type (stmt),
1086 fn, gimple_call_num_args (stmt), args);
1087 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
f257af64 1088 if (retval)
1089 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1090 STRIP_NOPS (retval);
1091 return retval;
1092 }
1093 return NULL_TREE;
1094 }
4ee9c684 1095
75a70cf9 1096 case GIMPLE_COND:
1097 {
1098 /* Handle comparison operators that can appear in GIMPLE form. */
1099 tree op0 = gimple_cond_lhs (stmt);
1100 tree op1 = gimple_cond_rhs (stmt);
1101 enum tree_code code = gimple_cond_code (stmt);
1102
1103 /* Simplify the operands down to constants when appropriate. */
1104 if (TREE_CODE (op0) == SSA_NAME)
1105 {
1106 prop_value_t *val = get_value (op0);
1107 if (val->lattice_val == CONSTANT)
1108 op0 = val->value;
1109 }
1110
1111 if (TREE_CODE (op1) == SSA_NAME)
1112 {
1113 prop_value_t *val = get_value (op1);
1114 if (val->lattice_val == CONSTANT)
1115 op1 = val->value;
1116 }
1117
389dd41b 1118 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
75a70cf9 1119 }
4ee9c684 1120
75a70cf9 1121 case GIMPLE_SWITCH:
1122 {
1123 tree rhs = gimple_switch_index (stmt);
04236c3a 1124
75a70cf9 1125 if (TREE_CODE (rhs) == SSA_NAME)
1126 {
1127 /* If the RHS is an SSA_NAME, return its known constant value,
1128 if any. */
1129 return get_value (rhs)->value;
1130 }
04236c3a 1131
75a70cf9 1132 return rhs;
1133 }
912f109f 1134
75a70cf9 1135 default:
1136 gcc_unreachable ();
4ee9c684 1137 }
4ee9c684 1138}
1139
1140
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF, COMPONENT_REF, REALPART_EXPR, IMAGPART_EXPR or MEM_REF
   into a constant aggregate (or a declaration with a known constant
   value).  SSA_NAME operands are resolved through the current CCP
   lattice.  Return NULL_TREE if the reference cannot be folded.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* A bare declaration folds directly to its known constant value,
     if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case MEM_REF:
	  /* ??? We could handle this case.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  base = get_base_address (base);
	  if (!base
	      || TREE_CODE (base) != VAR_DECL)
	    return NULL_TREE;

	  /* Fallthru.  */
	case VAR_DECL:
	  /* Only a read-only, locally-bound array variable has a
	     DECL_INITIAL we may rely on.  */
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  /* Nested reference: resolve the inner aggregate first.  */
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  Only single-byte integer-mode
	 elements within the string bounds are handled.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		/* Make sure the variable whose address we return is
		   known to the SSA machinery.  */
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		/* Make sure the variable whose address we return is
		   known to the SSA machinery.  */
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand; if it yields a COMPLEX_CST, extract the
	   requested part.  */
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case MEM_REF:
      /* Get the base object we are accessing.  An SSA_NAME base is
	 resolved through the lattice; only a constant &DECL base can
	 be folded.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
	  && (value = get_value (base))
	  && value->lattice_val == CONSTANT)
	base = value->value;
      if (TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  /* A zero-offset scalar access folds to the symbol's value,
	     with a VIEW_CONVERT_EXPR if the types differ.  */
	  if (DECL_P (base)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (base))
	      && integer_zerop (TREE_OPERAND (t, 1)))
	    {
	      tree res = get_symbol_constant_value (base);
	      if (res
		  && !useless_type_conversion_p
			(TREE_TYPE (t), TREE_TYPE (res)))
		res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
	      return res;
	    }

	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      /* For an array constructor, convert the byte offset into an
	 element index, provided the offset is an exact multiple of the
	 element size.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		STRIP_NOPS (cval);
		if (TREE_CODE (cval) == ADDR_EXPR)
		  {
		    tree base = get_base_address (TREE_OPERAND (cval, 0));
		    if (base && TREE_CODE (base) == VAR_DECL)
		      add_referenced_var (base);
		  }
		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
		else
		  return NULL_TREE;
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
75a70cf9 1420
/* Evaluate statement STMT and return its lattice value.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  /* Defer overflow warnings: they are only emitted below if folding
     actually produced a constant that we are going to use.  */
  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
1506
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  Return true if the statement
   was changed.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	/* Rewrite the condition into a constant true/false predicate.  */
	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	prop_value_t *val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree new_rhs = unshare_expr (val->value);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_value (arg))
		&& val->lattice_val == CONSTANT
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val->value));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	prop_value_t *val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree rhs = unshare_expr (val->value);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
1612
41511585 1613/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 1614 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1615 creates virtual definitions, set the value of each new name to that
75a70cf9 1616 of the RHS (if we can derive a constant out of the RHS).
1617 Value-returning call statements also perform an assignment, and
1618 are handled here. */
4ee9c684 1619
41511585 1620static enum ssa_prop_result
75a70cf9 1621visit_assignment (gimple stmt, tree *output_p)
4ee9c684 1622{
88dbf20f 1623 prop_value_t val;
88dbf20f 1624 enum ssa_prop_result retval;
4ee9c684 1625
75a70cf9 1626 tree lhs = gimple_get_lhs (stmt);
4ee9c684 1627
75a70cf9 1628 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1629 || gimple_call_lhs (stmt) != NULL_TREE);
1630
1631 if (gimple_assign_copy_p (stmt))
41511585 1632 {
75a70cf9 1633 tree rhs = gimple_assign_rhs1 (stmt);
88dbf20f 1634
75a70cf9 1635 if (TREE_CODE (rhs) == SSA_NAME)
1636 {
1637 /* For a simple copy operation, we copy the lattice values. */
1638 prop_value_t *nval = get_value (rhs);
1639 val = *nval;
1640 }
88dbf20f 1641 else
75a70cf9 1642 val = evaluate_stmt (stmt);
41511585 1643 }
1644 else
75a70cf9 1645 /* Evaluate the statement, which could be
1646 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 1647 val = evaluate_stmt (stmt);
4ee9c684 1648
88dbf20f 1649 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 1650
41511585 1651 /* Set the lattice value of the statement's output. */
88dbf20f 1652 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 1653 {
88dbf20f 1654 /* If STMT is an assignment to an SSA_NAME, we only have one
1655 value to set. */
1656 if (set_lattice_value (lhs, val))
1657 {
1658 *output_p = lhs;
1659 if (val.lattice_val == VARYING)
1660 retval = SSA_PROP_VARYING;
1661 else
1662 retval = SSA_PROP_INTERESTING;
1663 }
4ee9c684 1664 }
88dbf20f 1665
1666 return retval;
4ee9c684 1667}
1668
4ee9c684 1669
41511585 1670/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1671 if it can determine which edge will be taken. Otherwise, return
1672 SSA_PROP_VARYING. */
1673
1674static enum ssa_prop_result
75a70cf9 1675visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 1676{
88dbf20f 1677 prop_value_t val;
41511585 1678 basic_block block;
1679
75a70cf9 1680 block = gimple_bb (stmt);
41511585 1681 val = evaluate_stmt (stmt);
1682
1683 /* Find which edge out of the conditional block will be taken and add it
1684 to the worklist. If no single edge can be determined statically,
1685 return SSA_PROP_VARYING to feed all the outgoing edges to the
1686 propagation engine. */
88dbf20f 1687 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
41511585 1688 if (*taken_edge_p)
1689 return SSA_PROP_INTERESTING;
1690 else
1691 return SSA_PROP_VARYING;
4ee9c684 1692}
1693
4ee9c684 1694
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.

   This is the per-statement callback handed to ssa_propagate by
   do_ssa_ccp.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
4ee9c684 1758
4ee9c684 1759
88dbf20f 1760/* Main entry point for SSA Conditional Constant Propagation. */
41511585 1761
33a34f1e 1762static unsigned int
61207d43 1763do_ssa_ccp (void)
41511585 1764{
1765 ccp_initialize ();
1766 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 1767 if (ccp_finalize ())
eb9161e7 1768 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
33a34f1e 1769 else
1770 return 0;
4ee9c684 1771}
1772
5664499b 1773
1774static bool
41511585 1775gate_ccp (void)
5664499b 1776{
41511585 1777 return flag_tree_ccp != 0;
5664499b 1778}
1779
4ee9c684 1780
/* Pass descriptor for CCP, registered with the pass manager.
   Requires CFG and SSA form; verifies SSA and statements on exit.  */
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
4ee9c684 1800
4ee9c684 1801
75a70cf9 1802
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  Returns
   integer_zero_node (signalling the caller to delete the statement)
   on success, NULL_TREE otherwise.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  /* Only handle a direct call taking a single pointer SSA_NAME
     argument (the saved stack pointer).  */
  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  /* Scan forward within the block.  Any asm, any non-builtin call, or
     an alloca invalidates the transformation; a later
     __builtin_stack_restore makes this one redundant.  */
  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      /* Replace the stack_save call with a dummy constant so
		 it gets removed.  */
	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
75a70cf9 1887
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  Returns the replacement expression, or
   integer_zero_node if the statement should simply be deleted, or
   NULL_TREE if no optimization is possible.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  /* The transformation is only valid when the ABI's va_list for this
     function is a plain void* or char* pointer.  */
  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      /* Bail out if the target expands va_start itself or
	 __builtin_next_arg is unavailable.  */
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      /* Build *ap = (va_list) __builtin_next_arg (0).  */
      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      /* Build *dst = src as a plain pointer assignment.  */
      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
75a70cf9 1963
4ee9c684 1964/* A simple pass that attempts to fold all builtin functions. This pass
1965 is run after we've propagated as many constants as we can. */
1966
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      /* NOTE: the iterator is advanced inside the loop body, and only
	 on the paths where the current statement is not going to be
	 revisited.  */
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  /* Skip anything that is not a call to a normal builtin.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  /* Remember the original function code so we can tell below
	     whether folding turned this call into a different builtin.  */
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  /* The statement is about to be replaced; drop any value
	     histograms attached to it.  */
	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  /* Generic folding failed; try the builtin-specific cases.  */
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  /* Replace the call with RESULT.  If RESULT is not a valid
	     gimple value it must be regimplified first, which may take
	     the address of variables.  */
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  /* Folding may have made an EH edge dead; purging it can leave
	     unreachable blocks, cleaned up at the end of the pass.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
2082
41511585 2083
/* Pass descriptor for the "fab" (fold-all-builtins) pass.  No gate:
   it always runs; the work is done by execute_fold_all_builtins.  */
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_verify_ssa
  | TODO_update_ssa			/* todo_flags_finish */
 }
};