/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED  ->  the initial state of the value.  This value
                           is replaced with a correct initial value
                           the first time the value is used, so the
                           rest of the pass does not need to care about
                           it.  Using this value simplifies initialization
                           of the pass, and prevents us from needlessly
                           scanning statements that are never reached.

        UNDEFINED      ->  V_i is a local variable whose definition
                           has not been processed yet.  Therefore we
                           don't yet know if its value is a constant
                           or not.

        CONSTANT       ->  V_i has been found to hold a constant
                           value C.

        VARYING        ->  V_i cannot take a constant value, or if it
                           does, it is not possible to determine it
                           at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                if (PRED)
                  a_9 = 3;
                else
                  a_10 = 100;
                a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
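
   For example (an illustrative sketch, not compiler output), given
   the GIMPLE fragment

        i_1 = 10;
        j_2 = i_1 + 5;
        if (j_2 > 20) ...

   visiting 'i_1 = 10' sets i_1 to CONSTANT 10, which puts the SSA
   edge to 'j_2 = i_1 + 5' on the worklist; that statement then folds
   to CONSTANT 15, and the predicate 'j_2 > 20' folds to false, so
   only the false outgoing edge is marked executable.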

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

        # a_5 = VDEF <a_4>
        a.a = 2;

        # VUSE <a_5>
        x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
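
/* A small usage illustration (a sketch, not part of the original
   sources).  Given

     static const int answer = 42;
     static const int zeroed;     <- const, no initializer, binds locally

   get_symbol_constant_value returns the INTEGER_CST 42 for 'answer',
   and returns 0 for 'zeroed' by the zero-initializer rule above.  */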

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set the value of y to NaN.  This causes the value of x to be
   set to NaN.  When we later determine that y is in fact VARYING, fold
   uses the fact that HONOR_NANS is false, and we try to change the
   value of x to 0, causing an ICE.  With HONOR_NANS being false, the
   real appearance of NaN would cause undefined behavior, though, so
   claiming that y (and x) are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      val->mem_ref = NULL;
      return;
    }
}
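
/* Illustration (a sketch, not from the original sources): under flags
   where the mode honors neither signed zeros nor NaNs (e.g. fast-math
   style options), a propagated CONSTANT -0.0 is rewritten to CONSTANT
   0.0 so it compares equal to constants folded elsewhere, and a
   CONSTANT NaN is demoted to UNDEFINED rather than propagated.  */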

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))
                  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
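
/* Example of the monotonicity rule (illustration only): the assertion
   above allows UNDEFINED -> CONSTANT 4 and CONSTANT 4 -> VARYING, but
   a request such as CONSTANT 4 -> CONSTANT 5 would trip the assert,
   since values may only move up the lattice, never sideways or
   down.  */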

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum tree_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
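
/* Illustration (a sketch, not from the original sources): for
   'z_5 = x_3 + y_4' with x_3 UNDEFINED and y_4 CONSTANT 1, the
   PLUS_EXPR case above returns UNDEFINED, because the undefined
   operand may be assumed to be any convenient value.  For
   'z_5 = x_3 & y_4' the answer must be VARYING instead: if y_4 were
   0, the result would be fully determined even though x_3 is
   undefined, so claiming UNDEFINED would be wrong.  */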

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
        return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
          && !stmt_makes_single_store (stmt))
        return true;
    }

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

        any  M UNDEFINED   = any
        any  M VARYING     = VARYING
        Ci   M Cj          = Ci         if (i == j)
        Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1
           && (!do_store_ccp
               || (val1->mem_ref && val2->mem_ref
                   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
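
/* A few sample meets (illustration only, ignoring the STORE-CCP
   mem_ref check): UNDEFINED M CONSTANT 7 yields CONSTANT 7, which is
   what lets not-yet-visited PHI arguments be ignored optimistically;
   CONSTANT 3 M CONSTANT 3 stays CONSTANT 3; CONSTANT 3 M CONSTANT 4
   and anything M VARYING collapse to VARYING.  */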

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
              arg_val.mem_ref = NULL_TREE;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address
                 operations.  The folding we do here matches that in
                 tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && useless_type_conversion_p
                               (TREE_TYPE (TREE_OPERAND (*base, 0)),
                                TREE_TYPE (val->value))
                          && useless_type_conversion_p
                               (TREE_TYPE (*base),
                                TREE_TYPE (TREE_OPERAND (val->value, 0))))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }
              else if (do_store_ccp && stmt_makes_single_load (stmt))
                {
                  /* If the RHS is a memory load, see if the VUSEs associated
                     with it are a valid constant for that memory load.  */
                  prop_value_t *val = get_value_loaded_by (stmt, const_val);
                  if (val && val->mem_ref)
                    {
                      if (operand_equal_p (val->mem_ref, rhs, 0))
                        return val->value;

                      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR
                         of a complex type with a known constant value,
                         return it.  */
                      if ((TREE_CODE (rhs) == REALPART_EXPR
                           || TREE_CODE (rhs) == IMAGPART_EXPR)
                          && operand_equal_p (val->mem_ref,
                                              TREE_OPERAND (rhs, 0), 0))
                        return fold_build1 (TREE_CODE (rhs),
                                            TREE_TYPE (rhs), val->value);
                    }
                }

              if (kind == tcc_reference)
                return fold_const_aggregate_ref (rhs);
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type
                 conversions do not apply here.  Substitution later will
                 only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                                   (op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return fold_unary (subcode, gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address (op0, op1,
                                                           TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant
               value, if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
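
/* Illustration (a sketch, not from the original sources): with

     static const int primes[4] = { 2, 3, 5, 7 };

   a load from primes[2] folds to 5 through the ARRAY_REF case above,
   provided the index is (or has a lattice value that is) an
   INTEGER_CST, and __real__/__imag__ of a constant complex aggregate
   fold through the REALPART_EXPR/IMAGPART_EXPR case.  */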

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum tree_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else if (do_store_ccp && stmt_makes_single_load (stmt))
        {
          /* Same as above, but the RHS is not a gimple register and yet
             has a known VUSE.  If STMT is loading from the same memory
             location that created the SSA_NAMEs for the virtual operands,
             we can propagate the value on the RHS.  */
          prop_value_t *nval = get_value_loaded_by (stmt, const_val);

          if (nval
              && nval->mem_ref
              && operand_equal_p (nval->mem_ref, rhs, 0))
            val = *nval;
          else
            val = evaluate_stmt (stmt);
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
         constant value and mark the LHS as the memory reference
         associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
        val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
        {
          /* See PR 29801.  We may have VDEFs for read-only variables
             (see the handling of unmodifiable variables in
             add_virtual_operand); do not attempt to change their value.  */
          if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
            continue;

          changed |= set_lattice_value (vdef, val);
        }

      /* Note that for propagation purposes, we are only interested in
         visiting statements that load the exact same memory reference
         stored here.  Those statements will have the exact same list
         of virtual uses, so it is enough to set the output of this
         statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
        {
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};

static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}

static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_store_ccp =
{
 {
  GIMPLE_PASS,
  "store_ccp",                          /* name */
  gate_store_ccp,                       /* gate */
  do_ssa_store_ccp,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_STORE_CCP,                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};
1682

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either offset is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct accesses past the array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
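
/* As an illustration of the fold above (a hypothetical input, not part
   of the compiler):

     int a[8];
     int f (void) { return *(&a[1] + 3); }

   The inner ARRAY_REF contributes ELT_OFFSET 1, the 12-byte
   POINTER_PLUS_EXPR offset divides exactly by the element size
   (assuming a 4-byte int) to give index 3, and the result 1 + 3 = 4
   lies within the domain [0, 7], so the load is rewritten as a[4].  */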

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
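
/* An illustration (hypothetical input; assumes a 4-byte int and no
   padding):

     struct pair { int x; int y; } s;
     int f (void) { return *(int *) ((char *) &s + 4); }

   The byte offset 4 matches the position of field `y' exactly and the
   types agree, so the cmp == 0 case fires and the load folds to s.y.
   An offset landing inside an aggregate field instead recurses through
   maybe_fold_offset_to_array_ref and this function.  */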

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by a combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks, e.g., the
	 wrong field of a union, so it needs to be removed and a new
	 COMPONENT_REF constructed.  Such wrong COMPONENT_REFs are often
	 constructed by folding the (type *)&object within the expression
	 (type *)&object+offset.  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && size != -1
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
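
/* An illustration of the COMPONENT_REF stripping above (hypothetical
   input):

     union u { float f; int i; } v;
     int g (void) { return *(int *) &v.f; }

   Earlier folding may have left an ADDR_EXPR of the COMPONENT_REF v.f,
   which names the wrong union member for the desired int access;
   get_ref_base_and_extent recovers the underlying object and offset so
   that a correct COMPONENT_REF for member `i' can be rebuilt.  */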

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by a combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic in a way that changes
	 the reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || (TREE_CODE (t) == COMPONENT_REF
		  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
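
/* An illustration (hypothetical input):

     char f (void) { return "abc"[2]; }

   The address reduces to a string constant, so
   fold_read_from_constant_string folds the load to the character 'c'.
   An out-of-range access such as "_"[3] instead takes the STRING_CST
   bail-out above and is replaced by zero for the time being.  */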

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
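
/* An illustration (hypothetical input):

     int a[10];
     int *f (void) { return (int *) ((char *) &a + 8); }

   Here OP0 is &a and OP1 is 8; assuming a 4-byte int, the byte offset
   divides exactly by the element size to give index 2, so the
   POINTER_PLUS_EXPR is rewritten as &a[2], which later passes can
   propagate into dereferences.  */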

/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      if (!t
	  && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
						integer_zero_node,
						TREE_TYPE (TREE_TYPE (expr)))))
	return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
	 propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
				    TREE_OPERAND (expr, 0),
				    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  /* This is actually a conditional expression, not a GIMPLE
	     conditional statement; however, the valid_gimple_rhs_p
	     test still applies.  */
	  set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    {
	      COND_EXPR_COND (expr) = tem;
	      t = expr;
	      break;
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve the volatility of the original expression.  */
      TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
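
/* An illustration of the INDIRECT_REF case (hypothetical GIMPLE after
   an address has been propagated):

     p_1 = &x;
     y_2 = *p_1;

   Once &x has been substituted for p_1, the operand walk sees *&x and
   maybe_fold_stmt_indirect rewrites it to plain x.  */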

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, return false if LENGTH is already set and differs from
   the length we determine, or if we are unable to determine the
   length or value at all.  VISITED is a bitmap of visited variables.
   TYPE is 0 if the string length should be returned, 1 for the maximum
   string length and 2 for the maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
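
/* An illustration of the GIMPLE_PHI case (hypothetical input; `flag'
   is some runtime condition):

     const char *s = flag ? "one" : "two";
     n = strlen (s);

   Both PHI arguments are addresses of string constants with length 3,
   so the recursion agrees on LENGTH == 3 and the caller can fold the
   strlen call to the constant 3.  */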

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0; i < nargs; i++)
    {
      if ((arg_mask >> i) & 1)
	{
	  a = gimple_call_arg (stmt, i);
	  bitmap_clear (visited);
	  if (!get_maxval_strlen (a, &val[i], visited, type))
	    val[i] = NULL_TREE;
	}
    }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				   gimple_call_arg (stmt, 1),
				   ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				   gimple_call_arg (stmt, 1),
				   ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_memory_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_stxcpy_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
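
/* An illustration of the strcpy case (hypothetical input):

     char buf[8];
     void f (void) { strcpy (buf, "hi"); }

   get_maxval_strlen determines the source length 2 from the string
   constant, and fold_builtin_strcpy may then rewrite the call as the
   equivalent memcpy (buf, "hi", 3), which is generally cheaper to
   expand.  */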

/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree temp = fold (COND_EXPR_COND (rhs));
	    if (temp != COND_EXPR_COND (rhs))
	      result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPRs
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;
	else
	  /* It is possible that fold_stmt_r simplified the RHS.
	     Make sure that the subcode of this statement still
	     reflects the principal operator of the rhs operand.  */
	  return rhs;
      }
      break;

    case GIMPLE_UNARY_RHS:
      result = fold_unary (subcode,
			   gimple_expr_type (stmt),
			   gimple_assign_rhs1 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      else if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	       && POINTER_TYPE_P (gimple_expr_type (stmt))
	       && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	{
	  tree type = gimple_expr_type (stmt);
	  tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
						 integer_zero_node, type);
	  if (t)
	    return t;
	}
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	result = maybe_fold_stmt_addition (
		   TREE_TYPE (gimple_assign_lhs (stmt)),
		   gimple_assign_rhs1 (stmt),
		   gimple_assign_rhs2 (stmt));

      if (!result)
	result = fold_binary (subcode,
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
			     boolean_type_node,
			     gimple_cond_lhs (stmt),
			     gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
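
/* An illustration (hypothetical GIMPLE after constant propagation):

     if (4 > 0) goto <L1>; else goto <L2>;

   fold_binary reduces the comparison to a constant true and the
   condition is rewritten in place; removing the now-dead edge is left
   to a later CFG cleanup, since this function must not alter the
   cfg.  */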

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE)
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	stmt = gsi_stmt (*gsi);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      return changed;
    }

  return changed;
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops;
	tree new_rhs;
	old_num_ops = gimple_num_ops (stmt);
	si = gsi_for_stmt (stmt);
	new_rhs = fold_gimple_assign (&si);
	if (new_rhs != NULL_TREE
	    && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
	  {
	    gimple_assign_set_rhs_from_tree (&si, new_rhs);
	    changed = true;
	  }
	gcc_assert (gsi_stmt (si) == stmt);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
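
/* An illustration (hypothetical input; `use' is an external function):

     extern void use (char *);
     void f (int n) { char a[n]; use (a); }

   The gimplifier brackets the variable-length array with
   __builtin_stack_save and __builtin_stack_restore.  The restore sits
   at the end of a block whose only successor is the exit block, with
   no calls or asms after it, so it is optimized out here and the
   matching stack save is replaced by a null pointer constant.  */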

/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as a NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
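
/* An illustration (hypothetical input, on a target whose va_list is a
   plain character or void pointer):

     void f (int n, ...)
     {
       va_list ap;
       va_start (ap, n);
       va_end (ap);
     }

   The va_start call folds to the pointer assignment
   ap = __builtin_next_arg (0), and va_end folds away entirely.  On
   targets with a structured va_list (x86-64, for example) nothing is
   done here.  */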

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  push_stmt_changes (gsi_stmt_ptr (&i));

	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_rebuild_alias;
	    }

	  stmt = gsi_stmt (i);
	  pop_stmt_changes (gsi_stmt_ptr (&i));

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin; there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
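
/* An illustration of the BUILT_IN_CONSTANT_P case (hypothetical
   input):

     int f (int x) { return __builtin_constant_p (x) ? 0 : 1; }

   If earlier folding never proved `x' constant, the call is resolved
   to zero here, and the scheduled CFG cleanup can then remove the dead
   arm of the conditional.  */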


struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};