/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED -> the initial state of the value.  This value
                      is replaced with a correct initial value
                      the first time the value is used, so the
                      rest of the pass does not need to care about
                      it.  Using this value simplifies initialization
                      of the pass, and prevents us from needlessly
                      scanning statements that are never reached.

     UNDEFINED     -> V_i is a local variable whose definition
                      has not been processed yet.  Therefore we
                      don't yet know if its value is a constant
                      or not.

     CONSTANT      -> V_i has been found to hold a constant
                      value C.

     VARYING       -> V_i cannot take a constant value, or if it
                      does, it is not possible to determine it
                      at compile time.

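   As an illustrative aside (an expository sketch, not part of the
   original description above), these values form a lattice ordered

     UNINITIALIZED < UNDEFINED < CONSTANT < VARYING

   and set_lattice_value below asserts that every transition moves
   upward: UNDEFINED -> CONSTANT 4 -> VARYING is a legal history for
   an SSA name, while CONSTANT 4 -> UNDEFINED is not.
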
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

            if (PRED)
              a_9 = 3;
            else
              a_10 = 100;
            a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

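   To make the meet operator M used by this evaluation concrete
   (constants invented for exposition; see ccp_lattice_meet below):

     UNDEFINED   M  CONSTANT 4  =  CONSTANT 4
     CONSTANT 4  M  CONSTANT 4  =  CONSTANT 4
     CONSTANT 4  M  CONSTANT 5  =  VARYING
     anything    M  VARYING     =  VARYING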

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

     struct A a;
     const int B = 42;

     void foo (int i)
     {
       if (i > 10)
         a.a = 42;
       else
         {
           a.b = 21;
           a.a = a.b + 21;
         }

       if (a.a != B)
         never_executed ();
     }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally, since we
   glob partial loads/stores with the base symbol, we also keep track
   of the memory reference where the constant value was stored (in
   the MEM_REF field of PROP_VALUE_T).  For instance,

     # a_5 = VDEF <a_4>
     a.a = 2;

     # VUSE <a_5>
     x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

     int A;

     foo (int i)
     {
       if (i_3 > 10)
         A_4 = 3;
       # A_5 = PHI (A_4, A_2);

       # VUSE <A_5>
       A.0_6 = A;

       return A.0_6;
     }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

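/* As a minimal end-to-end sketch (the input below is invented for
   exposition), CCP rewrites

     x_1 = 4;                        x_1 = 4;
     y_2 = x_1 + 1;         into     y_2 = 5;
     if (y_2 > 10)                   if (0)
       ...                             ...

   and substitute_and_fold together with later CFG cleanup then
   removes the dead arm of the conditional.  */
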
188#include "config.h"
189#include "system.h"
190#include "coretypes.h"
191#include "tm.h"
4ee9c684 192#include "tree.h"
41511585 193#include "flags.h"
4ee9c684 194#include "rtl.h"
195#include "tm_p.h"
41511585 196#include "ggc.h"
4ee9c684 197#include "basic-block.h"
41511585 198#include "output.h"
41511585 199#include "expr.h"
200#include "function.h"
4ee9c684 201#include "diagnostic.h"
41511585 202#include "timevar.h"
4ee9c684 203#include "tree-dump.h"
41511585 204#include "tree-flow.h"
4ee9c684 205#include "tree-pass.h"
41511585 206#include "tree-ssa-propagate.h"
5a4b7e1e 207#include "value-prof.h"
41511585 208#include "langhooks.h"
8782adcf 209#include "target.h"
add6ee5e 210#include "toplev.h"
4ee9c684 211
212
213/* Possible lattice values. */
214typedef enum
215{
bfa30570 216 UNINITIALIZED,
4ee9c684 217 UNDEFINED,
218 CONSTANT,
219 VARYING
88dbf20f 220} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}



/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

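/* For instance (an illustrative case, declarations invented for
   exposition): given a TU-local definition

     static const int forty_two = 42;

   get_symbol_constant_value applied to forty_two's VAR_DECL returns
   the INTEGER_CST 42, while for "static const int zeroed;" with no
   initializer it returns a zero of the variable's type, since a
   locally-bound 'const' object cannot be overridden at link time.  */
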
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}

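/* For example (names invented for illustration): the default value of
   an SSA name for an uninitialized local register, say i_1 with no
   defining statement, is UNDEFINED, while the default definition of a
   PARM_DECL or of any non-register symbol is VARYING, since its
   initial contents are supplied by the caller or by memory.  */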

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      val->mem_ref = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))
                  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}

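/* A few illustrative classifications (statements invented for
   exposition): "x_1 = 7" is likely CONSTANT; for "x_1 = y_2 + z_3"
   the result is UNDEFINED if either addend is UNDEFINED, CONSTANT if
   at least one addend is a known constant, and VARYING otherwise.  */
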
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
        return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
          && !stmt_makes_single_store (stmt))
        return true;
    }

  /* A call is varying if it does not return a value, or if it calls
     a known function that is not a built-in.  (Indirect calls and
     built-ins may still evaluate to something interesting.)  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci       if (i == j)
     Ci  M Cj        = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1
           && (!do_store_ccp
               || (val1->mem_ref && val2->mem_ref
                   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.  VAL1 already holds
         the common value and memory reference, so there is nothing
         more to copy.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
              arg_val.mem_ref = NULL_TREE;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

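/* Tying the above together with the example from the header comment:
   for

     a_11 = PHI (a_9, a_10)

   where the edge carrying a_9 was marked non-executable and a_10 is
   CONSTANT 100, the meet never sees a_9, so a_11 becomes CONSTANT 100
   and SSA_PROP_INTERESTING is returned.  */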

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address
                 operations.  The folding we do here matches that in
                 tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && useless_type_conversion_p
                               (TREE_TYPE (TREE_OPERAND (*base, 0)),
                                TREE_TYPE (val->value))
                          && useless_type_conversion_p
                               (TREE_TYPE (*base),
                                TREE_TYPE (TREE_OPERAND (val->value, 0))))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }

              else if (do_store_ccp && stmt_makes_single_load (stmt))
                {
                  /* If the RHS is a memory load, see if the VUSEs associated
                     with it are a valid constant for that memory load.  */
                  prop_value_t *val = get_value_loaded_by (stmt, const_val);
                  if (val && val->mem_ref)
                    {
                      if (operand_equal_p (val->mem_ref, rhs, 0))
                        return val->value;

                      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR
                         of a complex type with a known constant value,
                         return it.  */
                      if ((TREE_CODE (rhs) == REALPART_EXPR
                           || TREE_CODE (rhs) == IMAGPART_EXPR)
                          && operand_equal_p (val->mem_ref,
                                              TREE_OPERAND (rhs, 0), 0))
                        return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs),
                                            val->value);
                    }
                }

              if (kind == tcc_reference)
                return fold_const_aggregate_ref (rhs);
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type
                 conversions do not apply here.  Substitution later will
                 only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                                   (op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return fold_unary (subcode, gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address (op0, op1,
                                                           TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant value,
               if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

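/* For instance (an invented case): given "x_3 = y_1 + 4" where y_1 is
   known CONSTANT 6, ccp_fold substitutes the lattice value for y_1 and
   hands PLUS_EXPR (6, 4) to fold_binary, returning the INTEGER_CST 10.  */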

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

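/* As a small illustration (declaration invented for exposition): given

     static const int primes[4] = { 2, 3, 5, 7 };

   fold_const_aggregate_ref applied to the ARRAY_REF primes[2] walks
   the CONSTRUCTOR in DECL_INITIAL (primes) and returns the
   INTEGER_CST 5; an ARRAY_REF into a constant STRING_CST likewise
   folds to the indexed character.  */
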
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else if (do_store_ccp && stmt_makes_single_load (stmt))
        {
          /* Same as above, but the RHS is not a gimple register and yet
             has a known VUSE.  If STMT is loading from the same memory
             location that created the SSA_NAMEs for the virtual operands,
             we can propagate the value on the RHS.  */
          prop_value_t *nval = get_value_loaded_by (stmt, const_val);

          if (nval
              && nval->mem_ref
              && operand_equal_p (nval->mem_ref, rhs, 0))
            val = *nval;
          else
            val = evaluate_stmt (stmt);
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
         constant value and mark the LHS as the memory reference
         associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
        val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
        {
          /* See PR 29801.  We may have VDEFs for read-only variables
             (see the handling of unmodifiable variables in
             add_virtual_operand); do not attempt to change their value.  */
          if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
            continue;

          changed |= set_lattice_value (vdef, val);
        }

      /* Note that for propagation purposes, we are only interested in
         visiting statements that load the exact same memory reference
         stored here.  Those statements will have the exact same list
         of virtual uses, so it is enough to set the output of this
         statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
        {
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect  /* todo_flags_finish */
 }
};


static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}

static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_store_ccp =
{
 {
  GIMPLE_PASS,
  "store_ccp",                          /* name */
  gate_store_ccp,                       /* gate */
  do_ssa_store_ccp,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_STORE_CCP,                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect  /* todo_flags_finish */
 }
};
1682
4ee9c684 1683/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1684 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
0bed3869 1685 is the desired result type. */
4ee9c684 1686
1687static tree
b7488229 1688maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1689 bool allow_negative_idx)
4ee9c684 1690{
e71da05f 1691 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
6374121b 1692 tree array_type, elt_type, elt_size;
b513c084 1693 tree domain_type;
6374121b 1694
1695 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1696 measured in units of the size of elements type) from that ARRAY_REF).
1697 We can't do anything if either is variable.
1698
1699 The case we handle here is *(&A[N]+O). */
1700 if (TREE_CODE (base) == ARRAY_REF)
1701 {
1702 tree low_bound = array_ref_low_bound (base);
1703
1704 elt_offset = TREE_OPERAND (base, 1);
1705 if (TREE_CODE (low_bound) != INTEGER_CST
1706 || TREE_CODE (elt_offset) != INTEGER_CST)
1707 return NULL_TREE;
1708
1709 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1710 base = TREE_OPERAND (base, 0);
1711 }
4ee9c684 1712
1713 /* Ignore stupid user tricks of indexing non-array variables. */
1714 array_type = TREE_TYPE (base);
1715 if (TREE_CODE (array_type) != ARRAY_TYPE)
1716 return NULL_TREE;
1717 elt_type = TREE_TYPE (array_type);
c8ca3ee7 1718 if (!useless_type_conversion_p (orig_type, elt_type))
4ee9c684 1719 return NULL_TREE;
e71da05f 1720
1721 /* Use signed size type for intermediate computation on the index. */
1722 idx_type = signed_type_for (size_type_node);
1723
6374121b 1724 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1725 element type (so we can use the alignment if it's not constant).
1726 Otherwise, compute the offset as an index by using a division. If the
1727 division isn't exact, then don't do anything. */
4ee9c684 1728 elt_size = TYPE_SIZE_UNIT (elt_type);
3b45913d 1729 if (!elt_size)
 1730 return NULL_TREE;
6374121b 1731 if (integer_zerop (offset))
1732 {
1733 if (TREE_CODE (elt_size) != INTEGER_CST)
1734 elt_size = size_int (TYPE_ALIGN (elt_type));
4ee9c684 1735
e71da05f 1736 idx = build_int_cst (idx_type, 0);
6374121b 1737 }
1738 else
1739 {
1740 unsigned HOST_WIDE_INT lquo, lrem;
1741 HOST_WIDE_INT hquo, hrem;
e71da05f 1742 double_int soffset;
6374121b 1743
e71da05f 1744 /* The final array offset should be signed, so we need
1745 to sign-extend the (possibly pointer) offset here
1746 and use signed division. */
1747 soffset = double_int_sext (tree_to_double_int (offset),
1748 TYPE_PRECISION (TREE_TYPE (offset)));
6374121b 1749 if (TREE_CODE (elt_size) != INTEGER_CST
e71da05f 1750 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1751 soffset.low, soffset.high,
6374121b 1752 TREE_INT_CST_LOW (elt_size),
1753 TREE_INT_CST_HIGH (elt_size),
1754 &lquo, &hquo, &lrem, &hrem)
1755 || lrem || hrem)
1756 return NULL_TREE;
4ee9c684 1757
e71da05f 1758 idx = build_int_cst_wide (idx_type, lquo, hquo);
6374121b 1759 }
1760
1761 /* Assume the low bound is zero. If there is a domain type, get the
1762 low bound, if any, convert the index into that type, and add the
1763 low bound. */
e71da05f 1764 min_idx = build_int_cst (idx_type, 0);
b513c084 1765 domain_type = TYPE_DOMAIN (array_type);
1766 if (domain_type)
4ee9c684 1767 {
b513c084 1768 idx_type = domain_type;
e71da05f 1769 if (TYPE_MIN_VALUE (idx_type))
1770 min_idx = TYPE_MIN_VALUE (idx_type);
6374121b 1771 else
e71da05f 1772 min_idx = fold_convert (idx_type, min_idx);
6374121b 1773
1774 if (TREE_CODE (min_idx) != INTEGER_CST)
1775 return NULL_TREE;
1776
e71da05f 1777 elt_offset = fold_convert (idx_type, elt_offset);
4ee9c684 1778 }
1779
6374121b 1780 if (!integer_zerop (min_idx))
1781 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1782 if (!integer_zerop (elt_offset))
1783 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1784
e71da05f 1785 /* Convert back to IDX_TYPE only after adding the offsets, so any truncation happens last. */
1786 idx = fold_convert (idx_type, idx);
1787
b513c084 1788 /* We don't want to construct an access past the array bounds. For example
b7488229 1789 char *(c[4]);
1790 c[3][2];
1791 should not be simplified into (*c)[14] or tree-vrp will
1792 give false warnings. The same is true for
1793 struct A { long x; char d[0]; } *a;
1794 (char *)a - 4;
 1795 which should not be folded to &a->d[-8]. */
1796 if (domain_type
1797 && TYPE_MAX_VALUE (domain_type)
b513c084 1798 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1799 {
1800 tree up_bound = TYPE_MAX_VALUE (domain_type);
1801
1802 if (tree_int_cst_lt (up_bound, idx)
1803 /* Accesses after the end of arrays of size 0 (gcc
1804 extension) and 1 are likely intentional ("struct
1805 hack"). */
1806 && compare_tree_int (up_bound, 1) > 0)
1807 return NULL_TREE;
1808 }
b7488229 1809 if (domain_type
1810 && TYPE_MIN_VALUE (domain_type))
1811 {
1812 if (!allow_negative_idx
1813 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1814 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1815 return NULL_TREE;
1816 }
1817 else if (!allow_negative_idx
1818 && compare_tree_int (idx, 0) < 0)
1819 return NULL_TREE;
b513c084 1820
d3828421 1821 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
4ee9c684 1822}
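
/* A minimal, hypothetical source-level sketch of the folding above.
   Assuming CCP first propagates &a[1] into the dereference, the byte
   offset 2 * sizeof (int) divides evenly by the element size and the
   load is rewritten as an ARRAY_REF.  Illustration only; kept out of
   the build with #if 0.  */
#if 0
static int a[8];

static int
example_array_fold (void)
{
  int *p = &a[1];
  /* In GIMPLE this is *(&a[1] p+ 8); the routine above folds the
     byte offset into the index, yielding a[3].  */
  return *(p + 2);
}
#endif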
1823
41511585 1824
3b45913d 1825/* Attempt to fold *(S+O) to S.X.
4ee9c684 1826 RECORD_TYPE is the record's type; BASE is the record itself, or a
 1827 pointer to it when BASE_IS_PTR is true. OFFSET is a byte displacement. ORIG_TYPE is the desired result type. */
4ee9c684 1828
3b45913d 1829static tree
4ee9c684 1830maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1831 tree orig_type, bool base_is_ptr)
1832{
6d5d8428 1833 tree f, t, field_type, tail_array_field, field_offset;
d9745cea 1834 tree ret;
1835 tree new_base;
4ee9c684 1836
1837 if (TREE_CODE (record_type) != RECORD_TYPE
1838 && TREE_CODE (record_type) != UNION_TYPE
1839 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1840 return NULL_TREE;
1841
1842 /* Short-circuit silly cases. */
c8ca3ee7 1843 if (useless_type_conversion_p (record_type, orig_type))
4ee9c684 1844 return NULL_TREE;
1845
1846 tail_array_field = NULL_TREE;
1847 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1848 {
1849 int cmp;
1850
1851 if (TREE_CODE (f) != FIELD_DECL)
1852 continue;
1853 if (DECL_BIT_FIELD (f))
1854 continue;
6d5d8428 1855
3b45913d 1856 if (!DECL_FIELD_OFFSET (f))
1857 continue;
6d5d8428 1858 field_offset = byte_position (f);
1859 if (TREE_CODE (field_offset) != INTEGER_CST)
4ee9c684 1860 continue;
1861
1862 /* ??? Java creates "interesting" fields for representing base classes.
1863 They have no name, and have no context. With no context, we get into
1864 trouble with nonoverlapping_component_refs_p. Skip them. */
1865 if (!DECL_FIELD_CONTEXT (f))
1866 continue;
1867
1868 /* The previous array field isn't at the end. */
1869 tail_array_field = NULL_TREE;
1870
1871 /* Check to see if this offset overlaps with the field. */
6d5d8428 1872 cmp = tree_int_cst_compare (field_offset, offset);
4ee9c684 1873 if (cmp > 0)
1874 continue;
1875
1876 field_type = TREE_TYPE (f);
4ee9c684 1877
1878 /* Here we exactly match the offset being checked. If the types match,
1879 then we can return that field. */
115073ff 1880 if (cmp == 0
c8ca3ee7 1881 && useless_type_conversion_p (orig_type, field_type))
4ee9c684 1882 {
1883 if (base_is_ptr)
1884 base = build1 (INDIRECT_REF, record_type, base);
40b19772 1885 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
4ee9c684 1886 return t;
1887 }
115073ff 1888
1889 /* Don't care about offsets into the middle of scalars. */
1890 if (!AGGREGATE_TYPE_P (field_type))
1891 continue;
4ee9c684 1892
115073ff 1893 /* Check for an array at the end of the struct. This is often
 1894 used for flexible array members. We should be able to
1895 turn this into an array access anyway. */
1896 if (TREE_CODE (field_type) == ARRAY_TYPE)
1897 tail_array_field = f;
1898
1899 /* Check the end of the field against the offset. */
1900 if (!DECL_SIZE_UNIT (f)
1901 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1902 continue;
1903 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1904 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1905 continue;
4ee9c684 1906
115073ff 1907 /* If we matched, then set offset to the displacement into
1908 this field. */
d9745cea 1909 if (base_is_ptr)
1910 new_base = build1 (INDIRECT_REF, record_type, base);
1911 else
1912 new_base = base;
1913 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1914
1915 /* Recurse to possibly find the match. */
b7488229 1916 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1917 f == TYPE_FIELDS (record_type));
d9745cea 1918 if (ret)
1919 return ret;
1920 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1921 orig_type, false);
1922 if (ret)
1923 return ret;
4ee9c684 1924 }
1925
1926 if (!tail_array_field)
1927 return NULL_TREE;
1928
1929 f = tail_array_field;
1930 field_type = TREE_TYPE (f);
115073ff 1931 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
4ee9c684 1932
4ee9c684 1933 /* If we get here, we've got an aggregate field, and a possibly
365db11e 1934 nonzero offset into it. Recurse and hope for a valid match. */
4ee9c684 1935 if (base_is_ptr)
1936 base = build1 (INDIRECT_REF, record_type, base);
40b19772 1937 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
4ee9c684 1938
b7488229 1939 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1940 f == TYPE_FIELDS (record_type));
4ee9c684 1941 if (t)
1942 return t;
1943 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1944 orig_type, false);
1945}
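
/* A hypothetical sketch of the component folding above: a load at a
   constant byte offset from a record is rewritten as a COMPONENT_REF.
   The struct and the assumption that Y sits at byte offset
   sizeof (int) (no padding between two ints) are illustrative only.  */
#if 0
struct point { int x; int y; };

static int
example_component_fold (struct point *p)
{
  /* The offset overlaps field Y exactly, so the load can be folded
     to p->y.  */
  return *(int *) ((char *) p + sizeof (int));
}
#endif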
1946
3b45913d 1947/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
 1948 or BASE[index] or by a combination of those.
1949
1950 Before attempting the conversion strip off existing ADDR_EXPRs and
1951 handled component refs. */
1952
1953tree
1954maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1955{
1956 tree ret;
1957 tree type;
1958 bool base_is_ptr = true;
1959
1960 STRIP_NOPS (base);
1961 if (TREE_CODE (base) == ADDR_EXPR)
1962 {
1963 base_is_ptr = false;
1964
1965 base = TREE_OPERAND (base, 0);
1966
 1967 /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
 1968 field of a union, so it needs to be removed and a new COMPONENT_REF
 1969 constructed. Such wrong COMPONENT_REFs are often constructed by folding
 1970 the (type *)&object within the expression (type *)&object+offset. */
5768aeb3 1971 if (handled_component_p (base))
3b45913d 1972 {
1973 HOST_WIDE_INT sub_offset, size, maxsize;
1974 tree newbase;
1975 newbase = get_ref_base_and_extent (base, &sub_offset,
1976 &size, &maxsize);
1977 gcc_assert (newbase);
5768aeb3 1978 if (size == maxsize
844461a0 1979 && size != -1
5768aeb3 1980 && !(sub_offset & (BITS_PER_UNIT - 1)))
3b45913d 1981 {
1982 base = newbase;
1983 if (sub_offset)
1984 offset = int_const_binop (PLUS_EXPR, offset,
1985 build_int_cst (TREE_TYPE (offset),
1986 sub_offset / BITS_PER_UNIT), 1);
1987 }
1988 }
c8ca3ee7 1989 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
3b45913d 1990 && integer_zerop (offset))
1991 return base;
1992 type = TREE_TYPE (base);
1993 }
1994 else
1995 {
1996 base_is_ptr = true;
1997 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1998 return NULL_TREE;
1999 type = TREE_TYPE (TREE_TYPE (base));
2000 }
2001 ret = maybe_fold_offset_to_component_ref (type, base, offset,
2002 orig_type, base_is_ptr);
2003 if (!ret)
2004 {
2005 if (base_is_ptr)
2006 base = build1 (INDIRECT_REF, type, base);
b7488229 2007 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
3b45913d 2008 }
2009 return ret;
2010}
41511585 2011
5768aeb3 2012/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
2013 or &BASE[index] or by combination of those.
2014
2015 Before attempting the conversion strip off existing component refs. */
2016
2017tree
2018maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
2019{
2020 tree t;
2021
2022 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
2023 && POINTER_TYPE_P (orig_type));
2024
2025 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
2026 if (t != NULL_TREE)
2027 {
2028 tree orig = addr;
2029 tree ptr_type;
2030
2031 /* For __builtin_object_size to function correctly we need to
 2032 make sure not to fold address arithmetic in a way that changes
 2033 the reference from one array to another. This would happen for
 2034 example for
 2035
 2036 struct X { char s1[10]; char s2[10]; } s;
2037 char *foo (void) { return &s.s2[-4]; }
2038
2039 where we need to avoid generating &s.s1[6]. As the C and
2040 C++ frontends create different initial trees
2041 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
2042 sophisticated comparisons here. Note that checking for the
2043 condition after the fact is easier than trying to avoid doing
2044 the folding. */
2045 STRIP_NOPS (orig);
2046 if (TREE_CODE (orig) == ADDR_EXPR)
2047 orig = TREE_OPERAND (orig, 0);
2048 if ((TREE_CODE (orig) == ARRAY_REF
2049 || (TREE_CODE (orig) == COMPONENT_REF
2050 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
2051 && (TREE_CODE (t) == ARRAY_REF
2052 || (TREE_CODE (t) == COMPONENT_REF
2053 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
2054 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
2055 ? TREE_OPERAND (orig, 0) : orig,
2056 TREE_CODE (t) == ARRAY_REF
2057 ? TREE_OPERAND (t, 0) : t, 0))
2058 return NULL_TREE;
2059
2060 ptr_type = build_pointer_type (TREE_TYPE (t));
2061 if (!useless_type_conversion_p (orig_type, ptr_type))
2062 return NULL_TREE;
2063 return build_fold_addr_expr_with_type (t, ptr_type);
2064 }
2065
2066 return NULL_TREE;
2067}
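
/* Hypothetical sketches of both sides of the address folding above.
   The first cast may be folded to an element address within the same
   array; the second must survive unfolded because of the
   __builtin_object_size concern described in the comment.  */
#if 0
static int b[4];
static struct X { char s1[10]; char s2[10]; } s;

static int *
example_addr_folded (void)
{
  /* Stays within array B, so this may become &b[1].  */
  return (int *) ((char *) &b + sizeof (int));
}

static char *
example_addr_not_folded (void)
{
  /* Must not become &s.s1[6]; the operand_equal_p check above
     rejects foldings that hop from one array to a sibling.  */
  return &s.s2[-4];
}
#endif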
2068
4ee9c684 2069/* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
2070 Return the simplified expression, or NULL if nothing could be done. */
2071
2072static tree
2073maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2074{
2075 tree t;
5acf8305 2076 bool volatile_p = TREE_THIS_VOLATILE (expr);
4ee9c684 2077
2078 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2079 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2080 are sometimes added. */
2081 base = fold (base);
40bcfc86 2082 STRIP_TYPE_NOPS (base);
4ee9c684 2083 TREE_OPERAND (expr, 0) = base;
2084
2085 /* One possibility is that the address reduces to a string constant. */
2086 t = fold_read_from_constant_string (expr);
2087 if (t)
2088 return t;
2089
0de36bdb 2090 /* Add in any offset from a POINTER_PLUS_EXPR. */
2091 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
4ee9c684 2092 {
2093 tree offset2;
2094
2095 offset2 = TREE_OPERAND (base, 1);
2096 if (TREE_CODE (offset2) != INTEGER_CST)
2097 return NULL_TREE;
2098 base = TREE_OPERAND (base, 0);
2099
0de36bdb 2100 offset = fold_convert (sizetype,
2101 int_const_binop (PLUS_EXPR, offset, offset2, 1));
4ee9c684 2102 }
2103
2104 if (TREE_CODE (base) == ADDR_EXPR)
2105 {
3b45913d 2106 tree base_addr = base;
2107
4ee9c684 2108 /* Strip the ADDR_EXPR. */
2109 base = TREE_OPERAND (base, 0);
2110
e67e5e1f 2111 /* Fold away a CONST_DECL to its value, if the value is a gimple invariant. */
2112 if (TREE_CODE (base) == CONST_DECL
0a685b29 2113 && is_gimple_min_invariant (DECL_INITIAL (base)))
e67e5e1f 2114 return DECL_INITIAL (base);
2115
4ee9c684 2116 /* Try folding *(&B+O) to B.X. */
3b45913d 2117 t = maybe_fold_offset_to_reference (base_addr, offset,
2118 TREE_TYPE (expr));
4ee9c684 2119 if (t)
5acf8305 2120 {
2121 TREE_THIS_VOLATILE (t) = volatile_p;
2122 return t;
2123 }
4ee9c684 2124 }
2125 else
2126 {
2127 /* We can get here for out-of-range string constant accesses,
2128 such as "_"[3]. Bail out of the entire substitution search
2129 and arrange for the entire statement to be replaced by a
06b27565 2130 call to __builtin_trap. In all likelihood this will all be
4ee9c684 2131 constant-folded away, but in the meantime we can't leave with
2132 something that get_expr_operands can't understand. */
2133
2134 t = base;
2135 STRIP_NOPS (t);
2136 if (TREE_CODE (t) == ADDR_EXPR
2137 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2138 {
2139 /* FIXME: Except that this causes problems elsewhere with dead
1fa3a8f6 2140 code not being deleted, and we die in the rtl expanders
4ee9c684 2141 because we failed to remove some ssa_name. In the meantime,
2142 just return zero. */
2143 /* FIXME2: This condition should be signaled by
2144 fold_read_from_constant_string directly, rather than
2145 re-checking for it here. */
2146 return integer_zero_node;
2147 }
2148
2149 /* Try folding *(B+O) to B->X. Still an improvement. */
2150 if (POINTER_TYPE_P (TREE_TYPE (base)))
2151 {
3b45913d 2152 t = maybe_fold_offset_to_reference (base, offset,
2153 TREE_TYPE (expr));
4ee9c684 2154 if (t)
2155 return t;
2156 }
2157 }
2158
2159 /* Otherwise we had an offset that we could not simplify. */
2160 return NULL_TREE;
2161}
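
/* A hypothetical sketch of the string-constant case above: an
   indirect load whose address reduces to a constant string folds,
   via fold_read_from_constant_string, to a character constant.  */
#if 0
static int
example_string_fold (void)
{
  /* *("abc" + 1) reduces to 'b'.  */
  return *("abc" + 1);
}
#endif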
2162
41511585 2163
75a70cf9 2164/* A quaint feature extant in our address arithmetic is that there
4ee9c684 2165 can be hidden type changes here. The type of the result need
2166 not be the same as the type of the input pointer.
2167
2168 What we're after here is an expression of the form
2169 (T *)(&array + const)
75a70cf9 2170 where array is OP0, const is OP1, RES_TYPE is T and
2171 the cast doesn't actually exist, but is implicit in the
0de36bdb 2172 type of the POINTER_PLUS_EXPR. We'd like to turn this into
4ee9c684 2173 &array[x]
2174 which may be able to propagate further. */
2175
75a70cf9 2176tree
2177maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
4ee9c684 2178{
4ee9c684 2179 tree ptd_type;
2180 tree t;
4ee9c684 2181
4ee9c684 2182 /* It had better be a constant. */
2183 if (TREE_CODE (op1) != INTEGER_CST)
2184 return NULL_TREE;
2185 /* The first operand should be an ADDR_EXPR. */
2186 if (TREE_CODE (op0) != ADDR_EXPR)
2187 return NULL_TREE;
2188 op0 = TREE_OPERAND (op0, 0);
2189
2190 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2191 the offset into it. */
2192 while (TREE_CODE (op0) == ARRAY_REF)
2193 {
2194 tree array_obj = TREE_OPERAND (op0, 0);
2195 tree array_idx = TREE_OPERAND (op0, 1);
2196 tree elt_type = TREE_TYPE (op0);
2197 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2198 tree min_idx;
2199
2200 if (TREE_CODE (array_idx) != INTEGER_CST)
2201 break;
2202 if (TREE_CODE (elt_size) != INTEGER_CST)
2203 break;
2204
2205 /* Un-bias the index by the min index of the array type. */
2206 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2207 if (min_idx)
2208 {
2209 min_idx = TYPE_MIN_VALUE (min_idx);
2210 if (min_idx)
2211 {
6374121b 2212 if (TREE_CODE (min_idx) != INTEGER_CST)
2213 break;
2214
535664e3 2215 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
4ee9c684 2216 if (!integer_zerop (min_idx))
2217 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2218 min_idx, 0);
2219 }
2220 }
2221
2222 /* Convert the index to a byte offset. */
535664e3 2223 array_idx = fold_convert (sizetype, array_idx);
4ee9c684 2224 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2225
2226 /* Update the operands for the next round, or for folding. */
0de36bdb 2227 op1 = int_const_binop (PLUS_EXPR,
4ee9c684 2228 array_idx, op1, 0);
4ee9c684 2229 op0 = array_obj;
2230 }
2231
75a70cf9 2232 ptd_type = TREE_TYPE (res_type);
0b4a6afc 2233 /* If we want a pointer to void, reconstruct the reference from the
2234 array element type. A pointer to that can be trivially converted
2235 to void *. This happens as we fold (void *)(ptr p+ off). */
2236 if (VOID_TYPE_P (ptd_type)
2237 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2238 ptd_type = TREE_TYPE (TREE_TYPE (op0));
4ee9c684 2239
2240 /* At which point we can try some of the same things as for indirects. */
b7488229 2241 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
4ee9c684 2242 if (!t)
2243 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2244 ptd_type, false);
2245 if (t)
75a70cf9 2246 t = build1 (ADDR_EXPR, res_type, t);
4ee9c684 2247
2248 return t;
2249}
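
/* A hypothetical sketch of the addition folding above: pointer
   arithmetic on an array address, with the cast implicit in the
   POINTER_PLUS_EXPR type, becomes the address of an element.  */
#if 0
static short q[16];

static short *
example_addition_fold (void)
{
  /* A byte offset of 3 * sizeof (short) may be rewritten to &q[3].  */
  return (short *) ((char *) &q + 3 * sizeof (short));
}
#endif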
2250
0d759d9c 2251/* For passing state through walk_tree into fold_stmt_r and its
2252 children. */
2253
2254struct fold_stmt_r_data
2255{
75a70cf9 2256 gimple stmt;
add6ee5e 2257 bool *changed_p;
2258 bool *inside_addr_expr_p;
0d759d9c 2259};
2260
4ee9c684 2261/* Subroutine of fold_stmt called via walk_tree. We perform several
2262 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2263
2264static tree
2265fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2266{
75a70cf9 2267 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2268 struct fold_stmt_r_data *fold_stmt_r_data;
2269 bool *inside_addr_expr_p;
2270 bool *changed_p;
4ee9c684 2271 tree expr = *expr_p, t;
30fde358 2272 bool volatile_p = TREE_THIS_VOLATILE (expr);
4ee9c684 2273
75a70cf9 2274 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2275 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2276 changed_p = fold_stmt_r_data->changed_p;
2277
4ee9c684 2278 /* ??? It'd be nice if walk_tree had a pre-order option. */
2279 switch (TREE_CODE (expr))
2280 {
2281 case INDIRECT_REF:
2282 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2283 if (t)
2284 return t;
2285 *walk_subtrees = 0;
2286
2287 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2288 integer_zero_node);
8ac2d49b 2289 if (!t
2290 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2291 /* If we had a good reason for propagating the address here,
2292 make sure we end up with valid gimple. See PR34989. */
2293 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
4ee9c684 2294 break;
2295
3b45913d 2296 case NOP_EXPR:
2297 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2298 if (t)
2299 return t;
2300 *walk_subtrees = 0;
2301
2302 if (POINTER_TYPE_P (TREE_TYPE (expr))
2303 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
5768aeb3 2304 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2305 integer_zero_node,
2306 TREE_TYPE (TREE_TYPE (expr)))))
2307 return t;
3b45913d 2308 break;
2309
0d759d9c 2310 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
4ee9c684 2311 We'd only want to bother decomposing an existing ARRAY_REF if
2312 the base array is found to have another offset contained within.
2313 Otherwise we'd be wasting time. */
0d759d9c 2314 case ARRAY_REF:
2315 /* If we are not processing expressions found within an
2316 ADDR_EXPR, then we can fold constant array references. */
2317 if (!*inside_addr_expr_p)
2318 t = fold_read_from_constant_string (expr);
2319 else
2320 t = NULL;
2321 break;
4ee9c684 2322
2323 case ADDR_EXPR:
0d759d9c 2324 *inside_addr_expr_p = true;
4ee9c684 2325 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
0d759d9c 2326 *inside_addr_expr_p = false;
4ee9c684 2327 if (t)
2328 return t;
2329 *walk_subtrees = 0;
2330
c7d4e749 2331 /* Make sure the value is properly considered constant, and so gets
2332 propagated as expected. */
4ee9c684 2333 if (*changed_p)
750ad201 2334 recompute_tree_invariant_for_addr_expr (expr);
4ee9c684 2335 return NULL_TREE;
2336
4ee9c684 2337 case COMPONENT_REF:
2338 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2339 if (t)
2340 return t;
2341 *walk_subtrees = 0;
2342
504d3463 2343 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2344 We've already checked that the records are compatible, so we should
2345 come up with a set of compatible fields. */
2346 {
2347 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2348 tree expr_field = TREE_OPERAND (expr, 1);
2349
2350 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2351 {
2352 expr_field = find_compatible_field (expr_record, expr_field);
2353 TREE_OPERAND (expr, 1) = expr_field;
2354 }
2355 }
4ee9c684 2356 break;
2357
aed164c3 2358 case TARGET_MEM_REF:
2359 t = maybe_fold_tmr (expr);
2360 break;
2361
75a70cf9 2362 case POINTER_PLUS_EXPR:
2363 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2364 if (t)
2365 return t;
2366 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2367 if (t)
2368 return t;
2369 *walk_subtrees = 0;
2370
2371 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2372 TREE_OPERAND (expr, 0),
2373 TREE_OPERAND (expr, 1));
2374 break;
2375
bb8a9715 2376 case COND_EXPR:
2377 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2378 {
2379 tree op0 = TREE_OPERAND (expr, 0);
add6ee5e 2380 tree tem;
2381 bool set;
2382
2383 fold_defer_overflow_warnings ();
2384 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2385 TREE_OPERAND (op0, 0),
2386 TREE_OPERAND (op0, 1));
75a70cf9 2387 /* This is actually a conditional expression, not a GIMPLE
 2388 conditional statement; however, the valid_gimple_rhs_p
2389 test still applies. */
2390 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
add6ee5e 2391 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2392 if (set)
f2532264 2393 {
75a70cf9 2394 COND_EXPR_COND (expr) = tem;
2395 t = expr;
f2532264 2396 break;
2397 }
bb8a9715 2398 }
f2532264 2399 return NULL_TREE;
bb8a9715 2400
4ee9c684 2401 default:
2402 return NULL_TREE;
2403 }
2404
2405 if (t)
2406 {
30fde358 2407 /* Preserve the volatility of the original expression. */
2408 TREE_THIS_VOLATILE (t) = volatile_p;
4ee9c684 2409 *expr_p = t;
2410 *changed_p = true;
2411 }
2412
2413 return NULL_TREE;
2414}
2415
0a39fd54 2416/* Return the string length, maximum string length or maximum value of
2417 ARG in LENGTH.
2418 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2419 is not NULL and, for TYPE == 0, its value is not equal to the length
 2420 we determine, or if we are unable to determine the length or value,
2421 return false. VISITED is a bitmap of visited variables.
2422 TYPE is 0 if string length should be returned, 1 for maximum string
2423 length and 2 for maximum value ARG can have. */
4ee9c684 2424
72648a0e 2425static bool
0a39fd54 2426get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
4ee9c684 2427{
75a70cf9 2428 tree var, val;
2429 gimple def_stmt;
41511585 2430
2431 if (TREE_CODE (arg) != SSA_NAME)
72648a0e 2432 {
ec0fa513 2433 if (TREE_CODE (arg) == COND_EXPR)
2434 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2435 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
0b4a6afc 2436 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2437 else if (TREE_CODE (arg) == ADDR_EXPR
2438 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2439 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2440 {
2441 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2442 if (TREE_CODE (aop0) == INDIRECT_REF
2443 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2444 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2445 length, visited, type);
2446 }
ec0fa513 2447
0a39fd54 2448 if (type == 2)
2449 {
2450 val = arg;
2451 if (TREE_CODE (val) != INTEGER_CST
2452 || tree_int_cst_sgn (val) < 0)
2453 return false;
2454 }
2455 else
2456 val = c_strlen (arg, 1);
41511585 2457 if (!val)
72648a0e 2458 return false;
e37235f0 2459
0a39fd54 2460 if (*length)
2461 {
2462 if (type > 0)
2463 {
2464 if (TREE_CODE (*length) != INTEGER_CST
2465 || TREE_CODE (val) != INTEGER_CST)
2466 return false;
2467
2468 if (tree_int_cst_lt (*length, val))
2469 *length = val;
2470 return true;
2471 }
2472 else if (simple_cst_equal (val, *length) != 1)
2473 return false;
2474 }
4ee9c684 2475
41511585 2476 *length = val;
2477 return true;
4ee9c684 2478 }
72648a0e 2479
41511585 2480 /* If we were already here, break the infinite cycle. */
2481 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2482 return true;
2483 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2484
2485 var = arg;
2486 def_stmt = SSA_NAME_DEF_STMT (var);
4ee9c684 2487
75a70cf9 2488 switch (gimple_code (def_stmt))
2489 {
2490 case GIMPLE_ASSIGN:
2491 /* The RHS of the statement defining VAR must either have a
2492 constant length or come from another SSA_NAME with a constant
2493 length. */
2494 if (gimple_assign_single_p (def_stmt)
2495 || gimple_assign_unary_nop_p (def_stmt))
2496 {
2497 tree rhs = gimple_assign_rhs1 (def_stmt);
2498 return get_maxval_strlen (rhs, length, visited, type);
2499 }
2500 return false;
2501
2502 case GIMPLE_PHI:
41511585 2503 {
2504 /* All the arguments of the PHI node must have the same constant
2505 length. */
75a70cf9 2506 unsigned i;
2507
2508 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2509 {
2510 tree arg = gimple_phi_arg (def_stmt, i)->def;
2511
2512 /* If this PHI has itself as an argument, we cannot
2513 determine the string length of this argument. However,
2514 if we can find a constant string length for the other
2515 PHI args then we can still be sure that this is a
2516 constant string length. So be optimistic and just
2517 continue with the next argument. */
2518 if (arg == gimple_phi_result (def_stmt))
2519 continue;
2520
2521 if (!get_maxval_strlen (arg, length, visited, type))
2522 return false;
2523 }
2524 }
2525 return true;
4ee9c684 2526
41511585 2527 default:
75a70cf9 2528 return false;
4ee9c684 2529 }
4ee9c684 2530}
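
/* A hypothetical sketch of what get_maxval_strlen can prove through
   a PHI: both incoming strings have length 3, so the length of P is
   known even though P itself is not constant.  */
#if 0
static unsigned long
example_strlen_phi (int cond)
{
  const char *p = cond ? "abc" : "xyz";
  /* Each PHI argument has constant strlen 3, so this folds to 3.  */
  return __builtin_strlen (p);
}
#endif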
2531
2532
75a70cf9 2533/* Fold a builtin call in statement STMT. Returns a simplified tree.
2534 We may return a non-constant expression, including another call
2535 to a different function and with different arguments, e.g.,
2536 substituting memcpy for strcpy when the string length is known.
2537 Note that some builtins expand into inline code that may not
2538 be valid in GIMPLE. Callers must take care. */
4ee9c684 2539
2540static tree
75a70cf9 2541ccp_fold_builtin (gimple stmt)
4ee9c684 2542{
0a39fd54 2543 tree result, val[3];
c2f47e15 2544 tree callee, a;
0a39fd54 2545 int arg_mask, i, type;
f0613857 2546 bitmap visited;
2547 bool ignore;
c2f47e15 2548 int nargs;
4ee9c684 2549
75a70cf9 2550 gcc_assert (is_gimple_call (stmt));
2551
2552 ignore = (gimple_call_lhs (stmt) == NULL);
4ee9c684 2553
2554 /* First try the generic builtin folder. If that succeeds, return the
2555 result directly. */
75a70cf9 2556 result = fold_call_stmt (stmt, ignore);
4ee9c684 2557 if (result)
0a39fd54 2558 {
2559 if (ignore)
2560 STRIP_NOPS (result);
2561 return result;
2562 }
f0613857 2563
2564 /* Ignore MD builtins. */
75a70cf9 2565 callee = gimple_call_fndecl (stmt);
f0613857 2566 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2567 return NULL_TREE;
4ee9c684 2568
2569 /* If the builtin could not be folded, and it has no argument list,
2570 we're done. */
75a70cf9 2571 nargs = gimple_call_num_args (stmt);
c2f47e15 2572 if (nargs == 0)
4ee9c684 2573 return NULL_TREE;
2574
2575 /* Limit the work only for builtins we know how to simplify. */
2576 switch (DECL_FUNCTION_CODE (callee))
2577 {
2578 case BUILT_IN_STRLEN:
2579 case BUILT_IN_FPUTS:
2580 case BUILT_IN_FPUTS_UNLOCKED:
0a39fd54 2581 arg_mask = 1;
2582 type = 0;
4ee9c684 2583 break;
2584 case BUILT_IN_STRCPY:
2585 case BUILT_IN_STRNCPY:
0a39fd54 2586 arg_mask = 2;
2587 type = 0;
2588 break;
2589 case BUILT_IN_MEMCPY_CHK:
2590 case BUILT_IN_MEMPCPY_CHK:
2591 case BUILT_IN_MEMMOVE_CHK:
2592 case BUILT_IN_MEMSET_CHK:
2593 case BUILT_IN_STRNCPY_CHK:
2594 arg_mask = 4;
2595 type = 2;
2596 break;
2597 case BUILT_IN_STRCPY_CHK:
2598 case BUILT_IN_STPCPY_CHK:
2599 arg_mask = 2;
2600 type = 1;
2601 break;
2602 case BUILT_IN_SNPRINTF_CHK:
2603 case BUILT_IN_VSNPRINTF_CHK:
2604 arg_mask = 2;
2605 type = 2;
4ee9c684 2606 break;
2607 default:
2608 return NULL_TREE;
2609 }
2610
2611 /* Try to use the dataflow information gathered by the CCP process. */
27335ffd 2612 visited = BITMAP_ALLOC (NULL);
4ee9c684 2613
0a39fd54 2614 memset (val, 0, sizeof (val));
75a70cf9 2615 for (i = 0; i < nargs; i++)
c2f47e15 2616 {
75a70cf9 2617 if ((arg_mask >> i) & 1)
2618 {
2619 a = gimple_call_arg (stmt, i);
2620 bitmap_clear (visited);
2621 if (!get_maxval_strlen (a, &val[i], visited, type))
2622 val[i] = NULL_TREE;
2623 }
c2f47e15 2624 }
4ee9c684 2625
27335ffd 2626 BITMAP_FREE (visited);
4ee9c684 2627
f0613857 2628 result = NULL_TREE;
4ee9c684 2629 switch (DECL_FUNCTION_CODE (callee))
2630 {
2631 case BUILT_IN_STRLEN:
0a39fd54 2632 if (val[0])
4ee9c684 2633 {
75a70cf9 2634 tree new_val =
2635 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
4ee9c684 2636
2637 /* If the result is not a valid gimple value, or not a cast
 2638 of a valid gimple value, then we cannot use the result. */
f0d6e81c 2639 if (is_gimple_val (new_val)
2640 || (is_gimple_cast (new_val)
2641 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2642 return new_val;
4ee9c684 2643 }
f0613857 2644 break;
2645
4ee9c684 2646 case BUILT_IN_STRCPY:
c2f47e15 2647 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2648 result = fold_builtin_strcpy (callee,
75a70cf9 2649 gimple_call_arg (stmt, 0),
2650 gimple_call_arg (stmt, 1),
c2f47e15 2651 val[1]);
f0613857 2652 break;
2653
4ee9c684 2654 case BUILT_IN_STRNCPY:
c2f47e15 2655 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2656 result = fold_builtin_strncpy (callee,
75a70cf9 2657 gimple_call_arg (stmt, 0),
2658 gimple_call_arg (stmt, 1),
2659 gimple_call_arg (stmt, 2),
c2f47e15 2660 val[1]);
f0613857 2661 break;
2662
4ee9c684 2663 case BUILT_IN_FPUTS:
75a70cf9 2664 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2665 gimple_call_arg (stmt, 1),
2666 ignore, false, val[0]);
f0613857 2667 break;
2668
4ee9c684 2669 case BUILT_IN_FPUTS_UNLOCKED:
75a70cf9 2670 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2671 gimple_call_arg (stmt, 1),
2672 ignore, true, val[0]);
0a39fd54 2673 break;
2674
2675 case BUILT_IN_MEMCPY_CHK:
2676 case BUILT_IN_MEMPCPY_CHK:
2677 case BUILT_IN_MEMMOVE_CHK:
2678 case BUILT_IN_MEMSET_CHK:
2679 if (val[2] && is_gimple_val (val[2]))
c2f47e15 2680 result = fold_builtin_memory_chk (callee,
75a70cf9 2681 gimple_call_arg (stmt, 0),
2682 gimple_call_arg (stmt, 1),
2683 gimple_call_arg (stmt, 2),
2684 gimple_call_arg (stmt, 3),
c2f47e15 2685 val[2], ignore,
0a39fd54 2686 DECL_FUNCTION_CODE (callee));
2687 break;
2688
2689 case BUILT_IN_STRCPY_CHK:
2690 case BUILT_IN_STPCPY_CHK:
2691 if (val[1] && is_gimple_val (val[1]))
c2f47e15 2692 result = fold_builtin_stxcpy_chk (callee,
75a70cf9 2693 gimple_call_arg (stmt, 0),
2694 gimple_call_arg (stmt, 1),
2695 gimple_call_arg (stmt, 2),
c2f47e15 2696 val[1], ignore,
0a39fd54 2697 DECL_FUNCTION_CODE (callee));
2698 break;
2699
2700 case BUILT_IN_STRNCPY_CHK:
2701 if (val[2] && is_gimple_val (val[2]))
75a70cf9 2702 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2703 gimple_call_arg (stmt, 1),
2704 gimple_call_arg (stmt, 2),
2705 gimple_call_arg (stmt, 3),
c2f47e15 2706 val[2]);
0a39fd54 2707 break;
2708
2709 case BUILT_IN_SNPRINTF_CHK:
2710 case BUILT_IN_VSNPRINTF_CHK:
2711 if (val[1] && is_gimple_val (val[1]))
75a70cf9 2712 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2713 DECL_FUNCTION_CODE (callee));
f0613857 2714 break;
4ee9c684 2715
2716 default:
8c0963c4 2717 gcc_unreachable ();
4ee9c684 2718 }
2719
f0613857 2720 if (result && ignore)
db97ad41 2721 result = fold_ignored_result (result);
f0613857 2722 return result;
4ee9c684 2723}
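
/* A hypothetical sketch of a folding enabled by the dataflow above:
   once the source length is known to be 3, fold_builtin_strcpy may
   replace the strcpy with a fixed-size memcpy of 4 bytes (length
   plus the terminating NUL).  */
#if 0
static char buf[16];

static void
example_strcpy_fold (int cond)
{
  const char *src = cond ? "abc" : "def";
  __builtin_strcpy (buf, src);
}
#endif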
2724
75a70cf9 2725/* Attempt to fold an assignment statement pointed-to by SI. Returns a
2726 replacement rhs for the statement or NULL_TREE if no simplification
2727 could be made. It is assumed that the operands have been previously
2728 folded. */
2729
2730static tree
2731fold_gimple_assign (gimple_stmt_iterator *si)
2732{
2733 gimple stmt = gsi_stmt (*si);
2734 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2735
2736 tree result = NULL;
2737
2738 switch (get_gimple_rhs_class (subcode))
2739 {
2740 case GIMPLE_SINGLE_RHS:
2741 {
2742 tree rhs = gimple_assign_rhs1 (stmt);
2743
2744 /* Try to fold a conditional expression. */
2745 if (TREE_CODE (rhs) == COND_EXPR)
2746 {
2747 tree temp = fold (COND_EXPR_COND (rhs));
2748 if (temp != COND_EXPR_COND (rhs))
2749 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2750 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2751 }
2752
2753 /* If we couldn't fold the RHS, hand over to the generic
2754 fold routines. */
2755 if (result == NULL_TREE)
2756 result = fold (rhs);
2757
2758 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2759 that may have been added by fold, and "useless" type
2760 conversions that might now be apparent due to propagation. */
2761 STRIP_USELESS_TYPE_CONVERSION (result);
2762
2763 if (result != rhs && valid_gimple_rhs_p (result))
2764 return result;
2765 else
2766 /* It is possible that fold_stmt_r simplified the RHS.
2767 Make sure that the subcode of this statement still
2768 reflects the principal operator of the rhs operand. */
2769 return rhs;
2770 }
2771 break;
2772
2773 case GIMPLE_UNARY_RHS:
2774 result = fold_unary (subcode,
2775 gimple_expr_type (stmt),
2776 gimple_assign_rhs1 (stmt));
2777
2778 if (result)
2779 {
2780 STRIP_USELESS_TYPE_CONVERSION (result);
2781 if (valid_gimple_rhs_p (result))
2782 return result;
2783 }
d9659041 2784 else if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
75a70cf9 2785 && POINTER_TYPE_P (gimple_expr_type (stmt))
2786 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2787 {
2788 tree type = gimple_expr_type (stmt);
5768aeb3 2789 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2790 integer_zero_node, type);
75a70cf9 2791 if (t)
5768aeb3 2792 return t;
75a70cf9 2793 }
2794 break;
2795
2796 case GIMPLE_BINARY_RHS:
2797 /* Try to fold pointer addition. */
2798 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2799 result = maybe_fold_stmt_addition (
2800 TREE_TYPE (gimple_assign_lhs (stmt)),
2801 gimple_assign_rhs1 (stmt),
2802 gimple_assign_rhs2 (stmt));
2803
2804 if (!result)
2805 result = fold_binary (subcode,
2806 TREE_TYPE (gimple_assign_lhs (stmt)),
2807 gimple_assign_rhs1 (stmt),
2808 gimple_assign_rhs2 (stmt));
2809
2810 if (result)
2811 {
2812 STRIP_USELESS_TYPE_CONVERSION (result);
2813 if (valid_gimple_rhs_p (result))
2814 return result;
2815 }
2816 break;
2817
2818 case GIMPLE_INVALID_RHS:
2819 gcc_unreachable ();
2820 }
2821
2822 return NULL_TREE;
2823}
2824
2825/* Attempt to fold a conditional statement. Return true if any changes were
2826 made. We only attempt to fold the condition expression, and do not perform
2827 any transformation that would require alteration of the cfg. It is
2828 assumed that the operands have been previously folded. */
2829
2830static bool
2831fold_gimple_cond (gimple stmt)
2832{
2833 tree result = fold_binary (gimple_cond_code (stmt),
2834 boolean_type_node,
2835 gimple_cond_lhs (stmt),
2836 gimple_cond_rhs (stmt));
2837
2838 if (result)
2839 {
2840 STRIP_USELESS_TYPE_CONVERSION (result);
2841 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2842 {
2843 gimple_cond_set_condition_from_tree (stmt, result);
2844 return true;
2845 }
2846 }
2847
2848 return false;
2849}
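
/* A hypothetical sketch of the condition folding above: after CCP
   substitutes N = 4, the GIMPLE_COND compares 4 > 3 and fold_binary
   reduces it to a constant, making the branch unconditional.  */
#if 0
static int
example_cond_fold (void)
{
  int n = 4;
  if (n > 3)
    return 1;
  return 0;
}
#endif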
2850
2851
2852/* Attempt to fold a call statement referenced by the statement iterator GSI.
2853 The statement may be replaced by another statement, e.g., if the call
2854 simplifies to a constant value. Return true if any changes were made.
2855 It is assumed that the operands have been previously folded. */
2856
2857static bool
2858fold_gimple_call (gimple_stmt_iterator *gsi)
2859{
2860 gimple stmt = gsi_stmt (*gsi);
2861
2862 tree callee = gimple_call_fndecl (stmt);
2863
2864 /* Check for builtins that CCP can handle using information not
2865 available in the generic fold routines. */
2866 if (callee && DECL_BUILT_IN (callee))
2867 {
2868 tree result = ccp_fold_builtin (stmt);
2869
2870 if (result)
2871 return update_call_from_tree (gsi, result);
2872 }
2873 else
2874 {
2875 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2876 here are when we've propagated the address of a decl into the
2877 object slot. */
2878 /* ??? Should perhaps do this in fold proper. However, doing it
2879 there requires that we create a new CALL_EXPR, and that requires
2880 copying EH region info to the new node. Easier to just do it
2881 here where we can just smash the call operand. */
2882 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2883 callee = gimple_call_fn (stmt);
2884 if (TREE_CODE (callee) == OBJ_TYPE_REF
2885 && lang_hooks.fold_obj_type_ref
2886 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2887 && DECL_P (TREE_OPERAND
2888 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2889 {
2890 tree t;
2891
2892 /* ??? Caution: Broken ADDR_EXPR semantics means that
2893 looking at the type of the operand of the addr_expr
2894 can yield an array type. See silly exception in
2895 check_pointer_types_r. */
2896 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2897 t = lang_hooks.fold_obj_type_ref (callee, t);
2898 if (t)
2899 {
2900 gimple_call_set_fn (stmt, t);
2901 return true;
2902 }
2903 }
2904 }
2905
2906 return false;
2907}
4ee9c684 2908
75a70cf9 2909/* Fold the statement pointed to by GSI. In some cases, this function may
41511585 2910 replace the whole statement with a new one. Returns true iff folding
2911 makes any changes. */
4ee9c684 2912
41511585 2913bool
75a70cf9 2914fold_stmt (gimple_stmt_iterator *gsi)
4ee9c684 2915{
75a70cf9 2916 tree res;
0d759d9c 2917 struct fold_stmt_r_data fold_stmt_r_data;
75a70cf9 2918 struct walk_stmt_info wi;
2919
41511585 2920 bool changed = false;
0d759d9c 2921 bool inside_addr_expr = false;
2922
75a70cf9 2923 gimple stmt = gsi_stmt (*gsi);
add6ee5e 2924
2925 fold_stmt_r_data.stmt = stmt;
0d759d9c 2926 fold_stmt_r_data.changed_p = &changed;
2927 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
4ee9c684 2928
75a70cf9 2929 memset (&wi, 0, sizeof (wi));
2930 wi.info = &fold_stmt_r_data;
4ee9c684 2931
75a70cf9 2932 /* Fold the individual operands.
2933 For example, fold instances of *&VAR into VAR, etc. */
2934 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2935 gcc_assert (!res);
4ee9c684 2936
75a70cf9 2937 /* Fold the main computation performed by the statement. */
2938 switch (gimple_code (stmt))
4ee9c684 2939 {
75a70cf9 2940 case GIMPLE_ASSIGN:
2941 {
2942 tree new_rhs = fold_gimple_assign (gsi);
2943 if (new_rhs != NULL_TREE)
2944 {
2945 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2946 changed = true;
2947 }
2948 stmt = gsi_stmt (*gsi);
2949 break;
2950 }
2951 case GIMPLE_COND:
2952 changed |= fold_gimple_cond (stmt);
2953 break;
2954 case GIMPLE_CALL:
2955 /* The entire statement may be replaced in this case. */
2956 changed |= fold_gimple_call (gsi);
2957 break;
e77b8618 2958
75a70cf9 2959 default:
2960 return changed;
ec0fa513 2962 }
4ee9c684 2963
41511585 2964 return changed;
4ee9c684 2965}
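
/* A hypothetical sketch of the operand folding mentioned above:
   once CCP propagates &v into the dereference, fold_stmt_r turns
   the resulting *&v back into plain v.  */
#if 0
static int
example_star_amp_fold (void)
{
  int v = 42;
  int *p = &v;
  return *p;
}
#endif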
2966
8171a1dd 2967/* Perform the minimal folding on statement STMT. Only operations like
2968 *&x created by constant propagation are handled. The statement cannot
75a70cf9 2969 be replaced with a new one. Return true if the statement was
2970 changed, false otherwise. */
8171a1dd 2971
2972bool
75a70cf9 2973fold_stmt_inplace (gimple stmt)
8171a1dd 2974{
75a70cf9 2975 tree res;
0d759d9c 2976 struct fold_stmt_r_data fold_stmt_r_data;
75a70cf9 2977 struct walk_stmt_info wi;
2978 gimple_stmt_iterator si;
2979
8171a1dd 2980 bool changed = false;
0d759d9c 2981 bool inside_addr_expr = false;
2982
add6ee5e 2983 fold_stmt_r_data.stmt = stmt;
0d759d9c 2984 fold_stmt_r_data.changed_p = &changed;
2985 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
8171a1dd 2986
75a70cf9 2987 memset (&wi, 0, sizeof (wi));
2988 wi.info = &fold_stmt_r_data;
2989
2990 /* Fold the individual operands.
2991 For example, fold instances of *&VAR into VAR, etc.
8171a1dd 2992
75a70cf9 2993 It appears that, at one time, maybe_fold_stmt_indirect
2994 would cause the walk to return non-null in order to
2995 signal that the entire statement should be replaced with
 2996 a call to __builtin_trap. This functionality is currently
2997 disabled, as noted in a FIXME, and cannot be supported here. */
2998 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2999 gcc_assert (!res);
8171a1dd 3000
75a70cf9 3001 /* Fold the main computation performed by the statement. */
3002 switch (gimple_code (stmt))
3003 {
3004 case GIMPLE_ASSIGN:
3005 {
3006 unsigned old_num_ops;
3007 tree new_rhs;
3008 old_num_ops = gimple_num_ops (stmt);
3009 si = gsi_for_stmt (stmt);
3010 new_rhs = fold_gimple_assign (&si);
3011 if (new_rhs != NULL_TREE
3012 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
3013 {
3014 gimple_assign_set_rhs_from_tree (&si, new_rhs);
3015 changed = true;
3016 }
3017 gcc_assert (gsi_stmt (si) == stmt);
3018 break;
3019 }
3020 case GIMPLE_COND:
3021 changed |= fold_gimple_cond (stmt);
3022 break;
8171a1dd 3023
75a70cf9 3024 default:
3025 break;
3026 }
8171a1dd 3027
3028 return changed;
3029}
75a70cf9 3030
bdd0e199 3031/* Try to optimize out __builtin_stack_restore. Optimize it out
3032 if there is another __builtin_stack_restore in the same basic
3033 block and no calls or ASM_EXPRs are in between, or if this block's
3034 only outgoing edge is to EXIT_BLOCK and there are no calls or
3035 ASM_EXPRs after this __builtin_stack_restore. */
3036
3037static tree
75a70cf9 3038optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 3039{
75a70cf9 3040 tree callee, rhs;
3041 gimple stmt, stack_save;
3042 gimple_stmt_iterator stack_save_gsi;
3043
3044 basic_block bb = gsi_bb (i);
3045 gimple call = gsi_stmt (i);
bdd0e199 3046
75a70cf9 3047 if (gimple_code (call) != GIMPLE_CALL
3048 || gimple_call_num_args (call) != 1
3049 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3050 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 3051 return NULL_TREE;
3052
75a70cf9 3053 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 3054 {
75a70cf9 3055 stmt = gsi_stmt (i);
3056 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 3057 return NULL_TREE;
75a70cf9 3058 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 3059 continue;
3060
75a70cf9 3061 callee = gimple_call_fndecl (stmt);
bdd0e199 3062 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3063 return NULL_TREE;
3064
3065 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3066 break;
3067 }
3068
75a70cf9 3069 if (gsi_end_p (i)
bdd0e199 3070 && (! single_succ_p (bb)
3071 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3072 return NULL_TREE;
3073
75a70cf9 3074 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3075 if (gimple_code (stack_save) != GIMPLE_CALL
3076 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3077 || stmt_could_throw_p (stack_save)
3078 || !has_single_use (gimple_call_arg (call, 0)))
bdd0e199 3079 return NULL_TREE;
3080
75a70cf9 3081 callee = gimple_call_fndecl (stack_save);
bdd0e199 3082 if (!callee
3083 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3084 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
75a70cf9 3085 || gimple_call_num_args (stack_save) != 0)
bdd0e199 3086 return NULL_TREE;
3087
75a70cf9 3088 stack_save_gsi = gsi_for_stmt (stack_save);
3089 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3090 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3091 if (!update_call_from_tree (&stack_save_gsi, rhs))
bdd0e199 3092 {
75a70cf9 3093 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
bdd0e199 3094 return NULL_TREE;
3095 }
75a70cf9 3096 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
bdd0e199 3097
75a70cf9 3098 /* No effect, so the statement will be deleted. */
bdd0e199 3099 return integer_zero_node;
3100}
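
/* A hypothetical sketch of the situation targeted above, assuming
   the gimplifier wraps the VLA's block in a __builtin_stack_save /
   __builtin_stack_restore pair: the restore emitted at the closing
   brace is followed only by the return, with no calls or asms in
   between, so it can be optimized out.  */
#if 0
static int
example_stack_restore (int n)
{
  {
    char vla[n];
    vla[0] = 0;
  }
  return 0;
}
#endif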
75a70cf9 3101
8a58ed0a 3102/* If the va_list type is a simple pointer and nothing special is needed,
 3103 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 3104 optimize __builtin_va_end (&ap) away as a NOP, and turn __builtin_va_copy
 3105 into a simple pointer assignment. */
3106
3107static tree
75a70cf9 3108optimize_stdarg_builtin (gimple call)
8a58ed0a 3109{
5f57a8b1 3110 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 3111 bool va_list_simple_ptr;
3112
75a70cf9 3113 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 3114 return NULL_TREE;
3115
75a70cf9 3116 callee = gimple_call_fndecl (call);
5f57a8b1 3117
3118 cfun_va_list = targetm.fn_abi_va_list (callee);
3119 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3120 && (TREE_TYPE (cfun_va_list) == void_type_node
3121 || TREE_TYPE (cfun_va_list) == char_type_node);
3122
8a58ed0a 3123 switch (DECL_FUNCTION_CODE (callee))
3124 {
3125 case BUILT_IN_VA_START:
3126 if (!va_list_simple_ptr
3127 || targetm.expand_builtin_va_start != NULL
75a70cf9 3128 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
8a58ed0a 3129 return NULL_TREE;
3130
75a70cf9 3131 if (gimple_call_num_args (call) != 2)
8a58ed0a 3132 return NULL_TREE;
3133
75a70cf9 3134 lhs = gimple_call_arg (call, 0);
8a58ed0a 3135 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3136 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 3137 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3138 return NULL_TREE;
75a70cf9 3139
8a58ed0a 3140 lhs = build_fold_indirect_ref (lhs);
3141 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
75a70cf9 3142 1, integer_zero_node);
8a58ed0a 3143 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3144 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3145
3146 case BUILT_IN_VA_COPY:
3147 if (!va_list_simple_ptr)
3148 return NULL_TREE;
3149
75a70cf9 3150 if (gimple_call_num_args (call) != 2)
8a58ed0a 3151 return NULL_TREE;
3152
75a70cf9 3153 lhs = gimple_call_arg (call, 0);
8a58ed0a 3154 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3155 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 3156 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3157 return NULL_TREE;
3158
3159 lhs = build_fold_indirect_ref (lhs);
75a70cf9 3160 rhs = gimple_call_arg (call, 1);
8a58ed0a 3161 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 3162 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3163 return NULL_TREE;
3164
3165 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3166 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3167
3168 case BUILT_IN_VA_END:
75a70cf9 3169 /* No effect, so the statement will be deleted. */
8a58ed0a 3170 return integer_zero_node;
3171
3172 default:
3173 gcc_unreachable ();
3174 }
3175}
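
/* A hypothetical sketch, assuming a target whose va_list is a plain
   char or void pointer: the va_start below becomes a simple pointer
   assignment from __builtin_next_arg, and the va_end is deleted as
   a NOP.  */
#if 0
#include <stdarg.h>

static int
example_stdarg (int first, ...)
{
  va_list ap;
  int v;

  va_start (ap, first);
  v = va_arg (ap, int);
  va_end (ap);
  return v;
}
#endif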
75a70cf9 3176
909e5ecb 3177/* Convert EXPR into a GIMPLE value suitable for substitution on the
3178 RHS of an assignment. Insert the necessary statements before
75a70cf9 3179 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
3180 is replaced. If the call is expected to produces a result, then it
3181 is replaced by an assignment of the new RHS to the result variable.
3182 If the result is to be ignored, then the call is replaced by a
3183 GIMPLE_NOP. */
909e5ecb 3184
75a70cf9 3185static void
3186gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
909e5ecb 3187{
75a70cf9 3188 tree lhs;
3189 tree tmp = NULL_TREE; /* Silence warning. */
3190 gimple stmt, new_stmt;
3191 gimple_stmt_iterator i;
 3192 gimple_seq stmts = gimple_seq_alloc ();
dac18d1a 3193 struct gimplify_ctx gctx;
909e5ecb 3194
75a70cf9 3195 stmt = gsi_stmt (*si_p);
3196
3197 gcc_assert (is_gimple_call (stmt));
3198
3199 lhs = gimple_call_lhs (stmt);
3200
dac18d1a 3201 push_gimplify_context (&gctx);
75a70cf9 3202
3203 if (lhs == NULL_TREE)
3204 gimplify_and_add (expr, &stmts);
3205 else
a280136a 3206 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
75a70cf9 3207
909e5ecb 3208 pop_gimplify_context (NULL);
3209
75a70cf9 3210 if (gimple_has_location (stmt))
3211 annotate_all_with_location (stmts, gimple_location (stmt));
b66731e8 3212
909e5ecb 3213 /* The replacement can expose previously unreferenced variables. */
75a70cf9 3214 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3215 {
3216 new_stmt = gsi_stmt (i);
3217 find_new_referenced_vars (new_stmt);
3218 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3219 mark_symbols_for_renaming (new_stmt);
3220 gsi_next (si_p);
3221 }
3222
3223 if (lhs == NULL_TREE)
3224 new_stmt = gimple_build_nop ();
3225 else
909e5ecb 3226 {
75a70cf9 3227 new_stmt = gimple_build_assign (lhs, tmp);
3228 copy_virtual_operands (new_stmt, stmt);
3229 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
909e5ecb 3230 }
3231
75a70cf9 3232 gimple_set_location (new_stmt, gimple_location (stmt));
3233 gsi_replace (si_p, new_stmt, false);
909e5ecb 3234}
3235
4ee9c684 3236/* A simple pass that attempts to fold all builtin functions. This pass
3237 is run after we've propagated as many constants as we can. */
3238
2a1990e9 3239static unsigned int
4ee9c684 3240execute_fold_all_builtins (void)
3241{
b36237eb 3242 bool cfg_changed = false;
4ee9c684 3243 basic_block bb;
b1b7c0c4 3244 unsigned int todoflags = 0;
3245
4ee9c684 3246 FOR_EACH_BB (bb)
3247 {
75a70cf9 3248 gimple_stmt_iterator i;
3249 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 3250 {
75a70cf9 3251 gimple stmt, old_stmt;
4ee9c684 3252 tree callee, result;
0a39fd54 3253 enum built_in_function fcode;
4ee9c684 3254
75a70cf9 3255 stmt = gsi_stmt (i);
3256
3257 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3258 {
75a70cf9 3259 gsi_next (&i);
0a39fd54 3260 continue;
3261 }
75a70cf9 3262 callee = gimple_call_fndecl (stmt);
4ee9c684 3263 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 3264 {
75a70cf9 3265 gsi_next (&i);
0a39fd54 3266 continue;
3267 }
3268 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 3269
75a70cf9 3270 result = ccp_fold_builtin (stmt);
5a4b7e1e 3271
3272 if (result)
75a70cf9 3273 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 3274
4ee9c684 3275 if (!result)
3276 switch (DECL_FUNCTION_CODE (callee))
3277 {
3278 case BUILT_IN_CONSTANT_P:
3279 /* Resolve __builtin_constant_p. If it hasn't been
3280 folded to integer_one_node by now, it's fairly
3281 certain that the value simply isn't constant. */
75a70cf9 3282 result = integer_zero_node;
4ee9c684 3283 break;
3284
bdd0e199 3285 case BUILT_IN_STACK_RESTORE:
75a70cf9 3286 result = optimize_stack_restore (i);
8a58ed0a 3287 if (result)
3288 break;
75a70cf9 3289 gsi_next (&i);
8a58ed0a 3290 continue;
3291
3292 case BUILT_IN_VA_START:
3293 case BUILT_IN_VA_END:
3294 case BUILT_IN_VA_COPY:
3295 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 3296 result = optimize_stdarg_builtin (stmt);
bdd0e199 3297 if (result)
3298 break;
3299 /* FALLTHRU */
3300
4ee9c684 3301 default:
75a70cf9 3302 gsi_next (&i);
4ee9c684 3303 continue;
3304 }
3305
3306 if (dump_file && (dump_flags & TDF_DETAILS))
3307 {
3308 fprintf (dump_file, "Simplified\n ");
75a70cf9 3309 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3310 }
3311
75a70cf9 3312 old_stmt = stmt;
3313 push_stmt_changes (gsi_stmt_ptr (&i));
de6ed584 3314
75a70cf9 3315 if (!update_call_from_tree (&i, result))
3316 {
3317 gimplify_and_update_call_from_tree (&i, result);
3318 todoflags |= TODO_rebuild_alias;
3319 }
de6ed584 3320
75a70cf9 3321 stmt = gsi_stmt (i);
3322 pop_stmt_changes (gsi_stmt_ptr (&i));
de6ed584 3323
75a70cf9 3324 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3325 && gimple_purge_dead_eh_edges (bb))
b36237eb 3326 cfg_changed = true;
4ee9c684 3327
3328 if (dump_file && (dump_flags & TDF_DETAILS))
3329 {
3330 fprintf (dump_file, "to\n ");
75a70cf9 3331 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3332 fprintf (dump_file, "\n");
3333 }
0a39fd54 3334
3335 /* Retry the same statement if it changed into another
3336 builtin, there might be new opportunities now. */
75a70cf9 3337 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3338 {
75a70cf9 3339 gsi_next (&i);
0a39fd54 3340 continue;
3341 }
75a70cf9 3342 callee = gimple_call_fndecl (stmt);
0a39fd54 3343 if (!callee
75a70cf9 3344 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 3345 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 3346 gsi_next (&i);
4ee9c684 3347 }
3348 }
b1b7c0c4 3349
b36237eb 3350 /* Delete unreachable blocks. */
b1b7c0c4 3351 if (cfg_changed)
3352 todoflags |= TODO_cleanup_cfg;
3353
3354 return todoflags;
4ee9c684 3355}
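
/* A hypothetical sketch of the __builtin_constant_p resolution in
   the pass above: if CCP has not folded the predicate to 1 by this
   point, the pass resolves it to 0 and the non-constant branch is
   selected.  */
#if 0
static int
example_constant_p (int x)
{
  return __builtin_constant_p (x) ? 1 : 0;
}
#endif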
3356
41511585 3357
20099e35 3358struct gimple_opt_pass pass_fold_builtins =
4ee9c684 3359{
20099e35 3360 {
3361 GIMPLE_PASS,
4ee9c684 3362 "fab", /* name */
3363 NULL, /* gate */
3364 execute_fold_all_builtins, /* execute */
3365 NULL, /* sub */
3366 NULL, /* next */
3367 0, /* static_pass_number */
3368 0, /* tv_id */
49290934 3369 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 3370 0, /* properties_provided */
3371 0, /* properties_destroyed */
3372 0, /* todo_flags_start */
909e5ecb 3373 TODO_dump_func
3374 | TODO_verify_ssa
20099e35 3375 | TODO_update_ssa /* todo_flags_finish */
3376 }
4ee9c684 3377};