/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED ->  the initial state of the value.  This value
                       is replaced with a correct initial value
                       the first time the value is used, so the
                       rest of the pass does not need to care about
                       it.  Using this value simplifies initialization
                       of the pass, and prevents us from needlessly
                       scanning statements that are never reached.

     UNDEFINED     ->  V_i is a local variable whose definition
                       has not been processed yet.  Therefore we
                       don't yet know if its value is a constant
                       or not.

     CONSTANT      ->  V_i has been found to hold a constant
                       value C.

     VARYING       ->  V_i cannot take a constant value, or if it
                       does, it is not possible to determine it
                       at compile time.

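     For instance (an illustrative fragment), given

          x_1 = 4;
          y_2 = x_1 + 6;

     visiting the first statement sets x_1 to CONSTANT 4; visiting the
     second folds 'x_1 + 6' to 10 and sets y_2 to CONSTANT 10.  Had
     x_1 been VARYING instead, y_2 would have been VARYING as well.
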
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

             if (PRED)
               a_9 = 3;
             else
               a_10 = 100;
             a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

          # a_5 = VDEF <a_4>
          a.a = 2;

          # VUSE <a_5>
          x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance, global variables:

          int A;

          foo (int i)
          {
            if (i_3 > 10)
              A_4 = 3;
            # A_5 = PHI (A_4, A_2);

            # VUSE <A_5>
            A.0_6 = A;

            return A.0_6;
          }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}



/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
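
/* As an illustration (hypothetical code), in

     int
     f (int i)
     {
       int j;
       return i + j;
     }

   the SSA name for the uninitialized local 'j' gets the default value
   UNDEFINED (rule 2), while the SSA name for the parameter 'i' gets
   VARYING, since a PARM_DECL's value comes from outside the function.  */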

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to a function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

        any M UNDEFINED   = any
        any M VARYING     = VARYING
        Ci  M Cj          = Ci       if (i == j)
        Ci  M Cj          = VARYING  if (i != j)
   */
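
/* For instance, meeting CONSTANT 3 with CONSTANT 3 yields CONSTANT 3,
   meeting CONSTANT 3 with CONSTANT 4 yields VARYING, and meeting
   CONSTANT 3 with UNDEFINED leaves CONSTANT 3.  The last case is what
   lets the propagator stay optimistic about PHI arguments whose
   definitions have not been visited yet.  */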

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
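
/* For instance (hypothetical GIMPLE), given

     p_1 = &x;
     y_2 = *p_1;

   propagating '&x' into the dereference rewrites the load to
   'y_2 = x', which is only valid when the types of '*p_1' and 'x'
   convert uselessly, as checked above.  */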

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address
                 operations.  The folding we do here matches that in
                 tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && may_propagate_address_into_dereference
                               (val->value, *base))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }

              if (kind == tcc_reference)
                {
                  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary (VIEW_CONVERT_EXPR,
                                           TREE_TYPE (rhs), val->value);
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);
              tree res;

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type
                 conversions do not apply here.  Substitution later will
                 only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                                   (op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              res = fold_unary (subcode, gimple_expr_type (stmt), op0);

              /* If the operation was a conversion do _not_ mark a
                 resulting constant with TREE_OVERFLOW if the original
                 constant was not.  These conversions have implementation
                 defined behavior and retaining the TREE_OVERFLOW flag
                 here would confuse later passes such as VRP.  */
              if (res
                  && TREE_CODE (res) == INTEGER_CST
                  && TREE_CODE (op0) == INTEGER_CST
                  && CONVERT_EXPR_CODE_P (subcode))
                TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

              return res;
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address (op0, op1,
                                                           TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant
               value, if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}


/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
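
/* For example (hypothetical declarations), given

     static const int tab[3] = { 10, 20, 30 };

   a read of 'tab[2]' folds to 30, and 'tab[i_4]' folds to 20 when the
   lattice value of i_4 is CONSTANT 1.  */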

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect        /* todo_flags_finish */
 }
};


/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
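
/* For example (hypothetical operands), if BASE is 'int a[8]' and
   OFFSET is 8 with a 4-byte 'int', the result is the reference a[2]:
   the byte offset divides exactly by the element size.  */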
6de9cd9a
DN
1587
1588static tree
1fd89060
RG
1589maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1590 bool allow_negative_idx)
6de9cd9a 1591{
891fc5e9 1592 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
44de5aeb 1593 tree array_type, elt_type, elt_size;
3097760b 1594 tree domain_type;
44de5aeb
RK
1595
1596 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}

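/* A worked example of the folding above (illustrative only, not from
   the sources; it assumes a 4-byte int).  In the gimplified form the
   offset O of *(&A[N]+O) is in bytes, so given

     int a[8];
     ... *(&a[2] p+ 12) ...

   ELT_OFFSET is 2 and OFFSET is 12; dividing OFFSET by the element
   size yields an index contribution of 3, and the whole access is
   rewritten as a[5], which passes the upper-bound check against 7.  */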

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      if (base_is_ptr)
        new_base = build1 (INDIRECT_REF, record_type, base);
      else
        new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
                                                orig_type, false);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}

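/* An illustrative sketch of the recursion above (hypothetical types,
   4-byte int and no surprising padding assumed):

     struct S { int i; int a[4]; } *p;
     ... *(int *)((char *)p + 8) ...

   Field A overlaps byte offset 8 with a remaining displacement of 4,
   so the recursive call into maybe_fold_offset_to_array_ref folds the
   whole access to p->a[1].  */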

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks, e.g., the
         wrong field of a union, so it needs to be removed and a new
         COMPONENT_REF constructed.  Such wrong COMPONENT_REFs are often
         constructed by folding the (type *)&object within the expression
         (type *)&object+offset.  */
      if (handled_component_p (base))
        {
          HOST_WIDE_INT sub_offset, size, maxsize;
          tree newbase;
          newbase = get_ref_base_and_extent (base, &sub_offset,
                                             &size, &maxsize);
          gcc_assert (newbase);
          if (size == maxsize
              && size != -1
              && !(sub_offset & (BITS_PER_UNIT - 1)))
            {
              base = newbase;
              if (sub_offset)
                offset = int_const_binop (PLUS_EXPR, offset,
                                          build_int_cst (TREE_TYPE (offset),
                                                         sub_offset / BITS_PER_UNIT), 1);
            }
        }
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
          && integer_zerop (offset))
        return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
        return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
                                            orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
        base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
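
/* Illustrative case (hypothetical) for the COMPONENT_REF stripping above:

     union U { int i; float f; } u;
     ... *(int *)((char *)&u.f + 0) ...

   Earlier folding of (int *)&u.f can leave a COMPONENT_REF for the
   wrong union member; get_ref_base_and_extent recovers U itself so a
   fresh COMPONENT_REF for u.i can be constructed.  */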

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         reference from one array to another.  This would happen for
         example for

           struct X { char s1[10]; char s2[10]; } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || (TREE_CODE (t) == COMPONENT_REF
                  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}

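/* Example of the transformation (illustrative only; 4-byte int assumed):

     static int a[10];
     ... (int *) (&a p+ 4) ...

   OP0 is &a, OP1 is 4 and RES_TYPE is int *; the helpers above turn
   the 4-byte displacement into an element index and produce &a[1],
   which may then propagate into further dereferences.  */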

/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            {
              COND_EXPR_COND (expr) = tem;
              t = expr;
              break;
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}

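/* For illustration (hypothetical GIMPLE, not from the sources):

     s_3 = PHI <&"ab"[0](bb2), &"xy"[0](bb3)>

   Both arguments have a string length of 2, so for TYPE == 0 the walk
   above succeeds with *LENGTH == 2; had the lengths differed, the
   simple_cst_equal check would make it fail.  */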

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

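/* Illustrative effect (not from the sources): once the source string
   is known to have constant length, a call such as

     strcpy (buf, "hi");

   can be rewritten through fold_builtin_strcpy into the equivalent

     memcpy (buf, "hi", 3);

   which later passes can expand more cheaply.  */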

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPRs
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return rhs;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        result = maybe_fold_stmt_addition (
                   TREE_TYPE (gimple_assign_lhs (stmt)),
                   gimple_assign_rhs1 (stmt),
                   gimple_assign_rhs2 (stmt));

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}

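/* For instance (illustrative only): after propagation, a condition on
   an integer SSA name such as

     if (x_1 == x_1)

   folds via fold_binary to constant true, and the GIMPLE_COND is
   rewritten to the canonical constant condition, letting later CFG
   cleanup remove the dead arm.  */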

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      return changed;
      break;
    }

  return changed;
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

726a989a 3031
d7bd8aeb
JJ
3032/* If va_list type is a simple pointer and nothing special is needed,
3033 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3034 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
3035 pointer assignment. */
3036
3037static tree
726a989a 3038optimize_stdarg_builtin (gimple call)
d7bd8aeb 3039{
35cbb299 3040 tree callee, lhs, rhs, cfun_va_list;
d7bd8aeb
JJ
3041 bool va_list_simple_ptr;
3042
726a989a 3043 if (gimple_code (call) != GIMPLE_CALL)
d7bd8aeb
JJ
3044 return NULL_TREE;
3045
726a989a 3046 callee = gimple_call_fndecl (call);
35cbb299
KT
3047
3048 cfun_va_list = targetm.fn_abi_va_list (callee);
3049 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3050 && (TREE_TYPE (cfun_va_list) == void_type_node
3051 || TREE_TYPE (cfun_va_list) == char_type_node);
3052
d7bd8aeb
JJ
3053 switch (DECL_FUNCTION_CODE (callee))
3054 {
3055 case BUILT_IN_VA_START:
3056 if (!va_list_simple_ptr
3057 || targetm.expand_builtin_va_start != NULL
726a989a 3058 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
d7bd8aeb
JJ
3059 return NULL_TREE;
3060
726a989a 3061 if (gimple_call_num_args (call) != 2)
d7bd8aeb
JJ
3062 return NULL_TREE;
3063
726a989a 3064 lhs = gimple_call_arg (call, 0);
d7bd8aeb
JJ
3065 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3066 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
35cbb299 3067 != TYPE_MAIN_VARIANT (cfun_va_list))
d7bd8aeb 3068 return NULL_TREE;
726a989a 3069
d7bd8aeb
JJ
3070 lhs = build_fold_indirect_ref (lhs);
3071 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
726a989a 3072 1, integer_zero_node);
d7bd8aeb
JJ
3073 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3074 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3075
3076 case BUILT_IN_VA_COPY:
3077 if (!va_list_simple_ptr)
3078 return NULL_TREE;
3079
726a989a 3080 if (gimple_call_num_args (call) != 2)
d7bd8aeb
JJ
3081 return NULL_TREE;
3082
726a989a 3083 lhs = gimple_call_arg (call, 0);
d7bd8aeb
JJ
3084 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3085 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
35cbb299 3086 != TYPE_MAIN_VARIANT (cfun_va_list))
d7bd8aeb
JJ
3087 return NULL_TREE;
3088
3089 lhs = build_fold_indirect_ref (lhs);
726a989a 3090 rhs = gimple_call_arg (call, 1);
d7bd8aeb 3091 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
35cbb299 3092 != TYPE_MAIN_VARIANT (cfun_va_list))
d7bd8aeb
JJ
3093 return NULL_TREE;
3094
3095 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3096 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3097
3098 case BUILT_IN_VA_END:
726a989a 3099 /* No effect, so the statement will be deleted. */
d7bd8aeb
JJ
3100 return integer_zero_node;
3101
3102 default:
3103 gcc_unreachable ();
3104 }
3105}
726a989a 3106
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};