/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED  ->  the initial state of the value.  This value
                           is replaced with a correct initial value
                           the first time the value is used, so the
                           rest of the pass does not need to care about
                           it.  Using this value simplifies initialization
                           of the pass, and prevents us from needlessly
                           scanning statements that are never reached.

        UNDEFINED      ->  V_i is a local variable whose definition
                           has not been processed yet.  Therefore we
                           don't yet know if its value is a constant
                           or not.

        CONSTANT       ->  V_i has been found to hold a constant
                           value C.

        VARYING        ->  V_i cannot take a constant value, or if it
                           does, it is not possible to determine it
                           at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                if (PRED)
                  a_9 = 3;
                else
                  a_10 = 100;
                a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

        # a_5 = VDEF <a_4>
        a.a = 2;

        # VUSE <a_5>
        x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
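
/* As an illustration (example declarations only, not from this file),
   given file-scope declarations like

     static const int n = 42;
     static const double d;

   get_symbol_constant_value returns 42 for 'n'; for 'd', which has no
   initializer, it returns 0.0 provided the target says 'd' cannot be
   overridden at link or run time.  */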

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}
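
/* As an illustration of the rules above, in

     int bar (int i)
     {
       int j;
       if (i)
         return j;
       return 0;
     }

   the default definition of the parameter 'i' is a PARM_DECL use and
   starts out VARYING, while the uninitialized local 'j' starts out
   UNDEFINED, leaving the compiler free to assume any value for it.  */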


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
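
/* As an illustration, when HONOR_SIGNED_ZEROS and HONOR_NANS are
   false for the value's mode (e.g., under fast-math style options),
   a CONSTANT lattice value of -0.0 is canonicalized to 0.0 above,
   and a CONSTANT NaN is demoted to UNDEFINED.  */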

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
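
/* As an illustration, the transition sequence

     UNDEFINED -> CONSTANT 3 -> VARYING

   is valid because each step moves up the lattice, whereas moving
   from CONSTANT 3 to CONSTANT 4, or back down from VARYING, would
   trip the monotonicity assertion above.  */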


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
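
/* As an illustration, if x_3 is UNDEFINED and y_4 is CONSTANT, then

     z_5 = x_3 + y_4

   is likely UNDEFINED, because a PLUS_EXPR lets an undefined operand
   dominate the result.  The same statement with BIT_IOR_EXPR is
   likely VARYING instead, since y_4 == -1 alone would determine the
   result.  */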

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any M UNDEFINED = any
                any M VARYING   = VARYING
                Ci  M Cj        = Ci       if (i == j)
                Ci  M Cj        = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
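
/* As an illustration, meeting CONSTANT 3 with CONSTANT 3 yields
   CONSTANT 3; meeting CONSTANT 3 with CONSTANT 4 or with VARYING
   yields VARYING; and meeting anything with UNDEFINED simply keeps
   the other value.  */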


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
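
/* As an illustration, if p_1 is known to have the constant value &a
   for some 'int a', then in

     x_2 = *p_1;

   the address may be propagated into the dereference and both
   cancelled, folding the load to x_2 = a, provided the types of *p_1
   and 'a' convert uselessly into each other.  */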

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address operations.
                 The folding we do here matches that in tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && may_propagate_address_into_dereference
                               (val->value, *base))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }

              if (kind == tcc_reference)
                {
                  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary (VIEW_CONVERT_EXPR,
                                           TREE_TYPE (rhs), val->value);
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);
              tree res;

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                                   (op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              res = fold_unary (subcode, gimple_expr_type (stmt), op0);

              /* If the operation was a conversion do _not_ mark a
                 resulting constant with TREE_OVERFLOW if the original
                 constant was not.  These conversions have implementation
                 defined behavior and retaining the TREE_OVERFLOW flag
                 here would confuse later passes such as VRP.  */
              if (res
                  && TREE_CODE (res) == INTEGER_CST
                  && TREE_CODE (op0) == INTEGER_CST
                  && CONVERT_EXPR_CODE_P (subcode))
                TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

              return res;
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address (op0, op1,
                                                           TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant value,
               if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
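
/* As an illustration, with 'int a[4]' and p_1 known to be &a[0], the
   POINTER_PLUS_EXPR statement

     q_2 = p_1 + 8;

   substitutes the constant address and, on a target with 4-byte ints,
   may fold to the invariant &a[2] via maybe_fold_offset_to_address.  */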


/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
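
/* As an illustration, given

     static const int tbl[3] = { 10, 20, 30 };

   a load from tbl[i_2], where i_2 has the known constant value 1,
   folds to 20, and the constant string read "abc"[1] folds to the
   character value 'b'.  */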
726a989a
RB
1308
1309/* Evaluate statement STMT.
1310 Valid only for assignments, calls, conditionals, and switches. */
6de9cd9a 1311
0bca51f0 1312static prop_value_t
726a989a 1313evaluate_stmt (gimple stmt)
6de9cd9a 1314{
0bca51f0 1315 prop_value_t val;
faaf1436 1316 tree simplified = NULL_TREE;
0bca51f0 1317 ccp_lattice_t likelyvalue = likely_value (stmt);
6ac01510 1318 bool is_constant;
0bca51f0 1319
6ac01510
ILT
1320 fold_defer_overflow_warnings ();
1321
6de9cd9a
DN
1322 /* If the statement is likely to have a CONSTANT result, then try
1323 to fold the statement to determine the constant value. */
726a989a
RB
1324 /* FIXME. This is the only place that we call ccp_fold.
1325 Since likely_value never returns CONSTANT for calls, we will
1326 not attempt to fold them, including builtins that may profit. */
6de9cd9a
DN
1327 if (likelyvalue == CONSTANT)
1328 simplified = ccp_fold (stmt);
1329 /* If the statement is likely to have a VARYING result, then do not
1330 bother folding the statement. */
87e1e42b 1331 else if (likelyvalue == VARYING)
726a989a 1332 {
e0c68ce9 1333 enum gimple_code code = gimple_code (stmt);
726a989a
RB
1334 if (code == GIMPLE_ASSIGN)
1335 {
1336 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1337
1338 /* Other cases cannot satisfy is_gimple_min_invariant
1339 without folding. */
1340 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1341 simplified = gimple_assign_rhs1 (stmt);
1342 }
1343 else if (code == GIMPLE_SWITCH)
1344 simplified = gimple_switch_index (stmt);
1345 else
1346 /* These cannot satisfy is_gimple_min_invariant without folding. */
1347 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1348 }
6de9cd9a 1349
6ac01510
ILT
1350 is_constant = simplified && is_gimple_min_invariant (simplified);
1351
1352 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1353
00d382a8
RG
1354 if (dump_file && (dump_flags & TDF_DETAILS))
1355 {
1356 fprintf (dump_file, "which is likely ");
1357 switch (likelyvalue)
1358 {
1359 case CONSTANT:
1360 fprintf (dump_file, "CONSTANT");
1361 break;
1362 case UNDEFINED:
1363 fprintf (dump_file, "UNDEFINED");
1364 break;
1365 case VARYING:
1366 fprintf (dump_file, "VARYING");
1367 break;
1368 default:;
1369 }
1370 fprintf (dump_file, "\n");
1371 }
1372
6ac01510 1373 if (is_constant)
6de9cd9a
DN
1374 {
1375 /* The statement produced a constant value. */
1376 val.lattice_val = CONSTANT;
0bca51f0 1377 val.value = simplified;
6de9cd9a
DN
1378 }
1379 else
1380 {
1381 /* The statement produced a nonconstant value. If the statement
0bca51f0
DN
1382 had UNDEFINED operands, then the result of the statement
1383 should be UNDEFINED. Otherwise, the statement is VARYING. */
106dec71 1384 if (likelyvalue == UNDEFINED)
a318e3ac
SB
1385 val.lattice_val = likelyvalue;
1386 else
1387 val.lattice_val = VARYING;
1388
0bca51f0 1389 val.value = NULL_TREE;
6de9cd9a 1390 }
750628d8
DN
1391
1392 return val;
6de9cd9a
DN
1393}
1394
750628d8 1395/* Visit the assignment statement STMT. Set the value of its LHS to the
0bca51f0
DN
1396 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1397 creates virtual definitions, set the value of each new name to that
726a989a
RB
1398 of the RHS (if we can derive a constant out of the RHS).
1399 Value-returning call statements also perform an assignment, and
1400 are handled here. */
6de9cd9a 1401
750628d8 1402static enum ssa_prop_result
726a989a 1403visit_assignment (gimple stmt, tree *output_p)
6de9cd9a 1404{
0bca51f0 1405 prop_value_t val;
0bca51f0 1406 enum ssa_prop_result retval;
6de9cd9a 1407
726a989a 1408 tree lhs = gimple_get_lhs (stmt);
6de9cd9a 1409
726a989a
RB
1410 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1411 || gimple_call_lhs (stmt) != NULL_TREE);
1412
1413 if (gimple_assign_copy_p (stmt))
750628d8 1414 {
726a989a 1415 tree rhs = gimple_assign_rhs1 (stmt);
0bca51f0 1416
726a989a
RB
1417 if (TREE_CODE (rhs) == SSA_NAME)
1418 {
1419 /* For a simple copy operation, we copy the lattice values. */
1420 prop_value_t *nval = get_value (rhs);
1421 val = *nval;
1422 }
0bca51f0 1423 else
726a989a 1424 val = evaluate_stmt (stmt);
750628d8
DN
1425 }
1426 else
726a989a
RB
1427 /* Evaluate the statement, which could be
1428 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
87e1e42b 1429 val = evaluate_stmt (stmt);
6de9cd9a 1430
0bca51f0 1431 retval = SSA_PROP_NOT_INTERESTING;
6de9cd9a 1432
750628d8 1433 /* Set the lattice value of the statement's output. */
0bca51f0 1434 if (TREE_CODE (lhs) == SSA_NAME)
6de9cd9a 1435 {
0bca51f0
DN
1436 /* If STMT is an assignment to an SSA_NAME, we only have one
1437 value to set. */
1438 if (set_lattice_value (lhs, val))
1439 {
1440 *output_p = lhs;
1441 if (val.lattice_val == VARYING)
1442 retval = SSA_PROP_VARYING;
1443 else
1444 retval = SSA_PROP_INTERESTING;
1445 }
6de9cd9a 1446 }
0bca51f0
DN
1447
1448 return retval;
6de9cd9a
DN
1449}
1450
6de9cd9a 1451
750628d8
DN
1452/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1453 if it can determine which edge will be taken. Otherwise, return
1454 SSA_PROP_VARYING. */
1455
1456static enum ssa_prop_result
726a989a 1457visit_cond_stmt (gimple stmt, edge *taken_edge_p)
6de9cd9a 1458{
0bca51f0 1459 prop_value_t val;
750628d8
DN
1460 basic_block block;
1461
726a989a 1462 block = gimple_bb (stmt);
750628d8
DN
1463 val = evaluate_stmt (stmt);
1464
1465 /* Find which edge out of the conditional block will be taken and add it
1466 to the worklist. If no single edge can be determined statically,
1467 return SSA_PROP_VARYING to feed all the outgoing edges to the
1468 propagation engine. */
0bca51f0 1469 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
750628d8
DN
1470 if (*taken_edge_p)
1471 return SSA_PROP_INTERESTING;
1472 else
1473 return SSA_PROP_VARYING;
6de9cd9a
DN
1474}
1475
6de9cd9a 1476
750628d8
DN
1477/* Evaluate statement STMT. If the statement produces an output value and
1478 its evaluation changes the lattice value of its output, return
1479 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1480 output value.
1481
1482 If STMT is a conditional branch and we can determine its truth
1483 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1484 value, return SSA_PROP_VARYING. */
6de9cd9a 1485
750628d8 1486static enum ssa_prop_result
726a989a 1487ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
750628d8 1488{
750628d8
DN
1489 tree def;
1490 ssa_op_iter iter;
6de9cd9a 1491
750628d8 1492 if (dump_file && (dump_flags & TDF_DETAILS))
6de9cd9a 1493 {
0bca51f0 1494 fprintf (dump_file, "\nVisiting statement:\n");
726a989a 1495 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
6de9cd9a 1496 }
6de9cd9a 1497
726a989a 1498 switch (gimple_code (stmt))
6de9cd9a 1499 {
726a989a
RB
1500 case GIMPLE_ASSIGN:
1501 /* If the statement is an assignment that produces a single
1502 output value, evaluate its RHS to see if the lattice value of
1503 its output has changed. */
1504 return visit_assignment (stmt, output_p);
1505
1506 case GIMPLE_CALL:
1507 /* A value-returning call also performs an assignment. */
1508 if (gimple_call_lhs (stmt) != NULL_TREE)
1509 return visit_assignment (stmt, output_p);
1510 break;
1511
1512 case GIMPLE_COND:
1513 case GIMPLE_SWITCH:
1514 /* If STMT is a conditional branch, see if we can determine
1515 which branch will be taken. */
1516 /* FIXME. It appears that we should be able to optimize
1517 computed GOTOs here as well. */
1518 return visit_cond_stmt (stmt, taken_edge_p);
1519
1520 default:
1521 break;
6de9cd9a 1522 }
6de9cd9a 1523
750628d8
DN
1524 /* Any other kind of statement is not interesting for constant
1525 propagation and, therefore, not worth simulating. */
750628d8
DN
1526 if (dump_file && (dump_flags & TDF_DETAILS))
1527 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
6de9cd9a 1528
750628d8
DN
1529 /* Definitions made by statements other than assignments to
1530 SSA_NAMEs represent unknown modifications to their outputs.
1531 Mark them VARYING. */
0bca51f0
DN
1532 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1533 {
dce2b2f6 1534 prop_value_t v = { VARYING, NULL_TREE };
0bca51f0
DN
1535 set_lattice_value (def, v);
1536 }
6de9cd9a 1537
750628d8
DN
1538 return SSA_PROP_VARYING;
1539}
6de9cd9a 1540
6de9cd9a 1541
0bca51f0 1542/* Main entry point for SSA Conditional Constant Propagation. */
750628d8 1543
3253eafb 1544static unsigned int
dce2b2f6 1545do_ssa_ccp (void)
750628d8
DN
1546{
1547 ccp_initialize ();
1548 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
3253eafb 1549 if (ccp_finalize ())
706ca88e 1550 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
3253eafb
JH
1551 else
1552 return 0;
6de9cd9a
DN
1553}
1554
173b818d
BB
1555
1556static bool
750628d8 1557gate_ccp (void)
173b818d 1558{
750628d8 1559 return flag_tree_ccp != 0;
173b818d
BB
1560}
1561
6de9cd9a 1562
8ddbbcae 1563struct gimple_opt_pass pass_ccp =
750628d8 1564{
8ddbbcae
JH
1565 {
1566 GIMPLE_PASS,
750628d8
DN
1567 "ccp", /* name */
1568 gate_ccp, /* gate */
0bca51f0 1569 do_ssa_ccp, /* execute */
750628d8
DN
1570 NULL, /* sub */
1571 NULL, /* next */
1572 0, /* static_pass_number */
1573 TV_TREE_CCP, /* tv_id */
7faade0f 1574 PROP_cfg | PROP_ssa, /* properties_required */
750628d8 1575 0, /* properties_provided */
ae07b463 1576 0, /* properties_destroyed */
750628d8 1577 0, /* todo_flags_start */
3253eafb 1578 TODO_dump_func | TODO_verify_ssa
8ddbbcae
JH
1579 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1580 }
750628d8 1581};
6de9cd9a 1582
6de9cd9a 1583
6de9cd9a
DN
1584/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1585 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
9cf737f8 1586 is the desired result type. */
6de9cd9a
DN
1587
1588static tree
1fd89060
RG
1589maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1590 bool allow_negative_idx)
6de9cd9a 1591{
891fc5e9 1592 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
44de5aeb 1593 tree array_type, elt_type, elt_size;
3097760b 1594 tree domain_type;
44de5aeb
RK
1595
1596 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1597 measured in units of the size of elements type) from that ARRAY_REF).
1598 We can't do anything if either is variable.
1599
1600 The case we handle here is *(&A[N]+O). */
1601 if (TREE_CODE (base) == ARRAY_REF)
1602 {
1603 tree low_bound = array_ref_low_bound (base);
1604
1605 elt_offset = TREE_OPERAND (base, 1);
1606 if (TREE_CODE (low_bound) != INTEGER_CST
1607 || TREE_CODE (elt_offset) != INTEGER_CST)
1608 return NULL_TREE;
1609
1610 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1611 base = TREE_OPERAND (base, 0);
1612 }
6de9cd9a
DN
1613
1614 /* Ignore stupid user tricks of indexing non-array variables. */
1615 array_type = TREE_TYPE (base);
1616 if (TREE_CODE (array_type) != ARRAY_TYPE)
1617 return NULL_TREE;
1618 elt_type = TREE_TYPE (array_type);
f4088621 1619 if (!useless_type_conversion_p (orig_type, elt_type))
6de9cd9a 1620 return NULL_TREE;
891fc5e9
RG
1621
1622 /* Use signed size type for intermediate computation on the index. */
1623 idx_type = signed_type_for (size_type_node);
1624
44de5aeb
RK
1625 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1626 element type (so we can use the alignment if it's not constant).
1627 Otherwise, compute the offset as an index by using a division. If the
1628 division isn't exact, then don't do anything. */
6de9cd9a 1629 elt_size = TYPE_SIZE_UNIT (elt_type);
fe9821b8
JH
1630 if (!elt_size)
1631 return NULL;
44de5aeb
RK
1632 if (integer_zerop (offset))
1633 {
1634 if (TREE_CODE (elt_size) != INTEGER_CST)
1635 elt_size = size_int (TYPE_ALIGN (elt_type));
6de9cd9a 1636
891fc5e9 1637 idx = build_int_cst (idx_type, 0);
44de5aeb
RK
1638 }
1639 else
1640 {
1641 unsigned HOST_WIDE_INT lquo, lrem;
1642 HOST_WIDE_INT hquo, hrem;
891fc5e9 1643 double_int soffset;
44de5aeb 1644
891fc5e9
RG
1645 /* The final array offset should be signed, so we need
1646 to sign-extend the (possibly pointer) offset here
1647 and use signed division. */
1648 soffset = double_int_sext (tree_to_double_int (offset),
1649 TYPE_PRECISION (TREE_TYPE (offset)));
44de5aeb 1650 if (TREE_CODE (elt_size) != INTEGER_CST
891fc5e9
RG
1651 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1652 soffset.low, soffset.high,
44de5aeb
RK
1653 TREE_INT_CST_LOW (elt_size),
1654 TREE_INT_CST_HIGH (elt_size),
1655 &lquo, &hquo, &lrem, &hrem)
1656 || lrem || hrem)
1657 return NULL_TREE;
6de9cd9a 1658
891fc5e9 1659 idx = build_int_cst_wide (idx_type, lquo, hquo);
44de5aeb
RK
1660 }
1661
1662 /* Assume the low bound is zero. If there is a domain type, get the
1663 low bound, if any, convert the index into that type, and add the
1664 low bound. */
891fc5e9 1665 min_idx = build_int_cst (idx_type, 0);
3097760b
JH
1666 domain_type = TYPE_DOMAIN (array_type);
1667 if (domain_type)
6de9cd9a 1668 {
3097760b 1669 idx_type = domain_type;
891fc5e9
RG
1670 if (TYPE_MIN_VALUE (idx_type))
1671 min_idx = TYPE_MIN_VALUE (idx_type);
44de5aeb 1672 else
891fc5e9 1673 min_idx = fold_convert (idx_type, min_idx);
44de5aeb
RK
1674
1675 if (TREE_CODE (min_idx) != INTEGER_CST)
1676 return NULL_TREE;
1677
891fc5e9 1678 elt_offset = fold_convert (idx_type, elt_offset);
6de9cd9a
DN
1679 }
1680
44de5aeb
RK
1681 if (!integer_zerop (min_idx))
1682 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1683 if (!integer_zerop (elt_offset))
1684 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1685
891fc5e9
RG
1686 /* Make sure to possibly truncate late after offsetting. */
1687 idx = fold_convert (idx_type, idx);
1688
1689 /* We don't want to construct accesses past the array bounds. For example
1690 char *(c[4]);
1691 c[3][2];
1692 should not be simplified into (*c)[14] or tree-vrp will
1693 give false warnings. The same is true for
1694 struct A { long x; char d[0]; } *a;
1695 (char *)a - 4;
1696 which should not be folded to &a->d[-8]. */
1697 if (domain_type
1698 && TYPE_MAX_VALUE (domain_type)
1699 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1700 {
1701 tree up_bound = TYPE_MAX_VALUE (domain_type);
1702
1703 if (tree_int_cst_lt (up_bound, idx)
1704 /* Accesses after the end of arrays of size 0 (gcc
1705 extension) and 1 are likely intentional ("struct
1706 hack"). */
1707 && compare_tree_int (up_bound, 1) > 0)
1708 return NULL_TREE;
1709 }
1710 if (domain_type
1711 && TYPE_MIN_VALUE (domain_type))
1712 {
1713 if (!allow_negative_idx
1714 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1715 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1716 return NULL_TREE;
1717 }
1718 else if (!allow_negative_idx
1719 && compare_tree_int (idx, 0) < 0)
1720 return NULL_TREE;
1721
1722 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1723}
1724
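/* Added illustration, not part of tree-ssa-ccp.c: a minimal stand-alone
   sketch of the *(&A[N]+O) shape handled above. At the GIMPLE level the
   offset is a byte count that must divide evenly by the element size; in
   C source, pointer arithmetic is already scaled, so *(&a[2] + 3) plays
   the role of *(&A[N]+O) and may be folded to the plain reference a[5]. */
#include <stdio.h>

int
main (void)
{
  int a[8] = { 0, 10, 20, 30, 40, 50, 60, 70 };
  int x = *(&a[2] + 3);   /* candidate for folding into a[5] */
  printf ("%d == %d\n", x, a[5]);
  return 0;
}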
1725
1726/* Attempt to fold *(S+O) to S.X.
1727 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1728 is the desired result type. */
1729
1730static tree
1731maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1732 tree orig_type, bool base_is_ptr)
1733{
1734 tree f, t, field_type, tail_array_field, field_offset;
1735 tree ret;
1736 tree new_base;
1737
1738 if (TREE_CODE (record_type) != RECORD_TYPE
1739 && TREE_CODE (record_type) != UNION_TYPE
1740 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1741 return NULL_TREE;
1742
1743 /* Short-circuit silly cases. */
1744 if (useless_type_conversion_p (record_type, orig_type))
1745 return NULL_TREE;
1746
1747 tail_array_field = NULL_TREE;
1748 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1749 {
1750 int cmp;
1751
1752 if (TREE_CODE (f) != FIELD_DECL)
1753 continue;
1754 if (DECL_BIT_FIELD (f))
1755 continue;
1756
1757 if (!DECL_FIELD_OFFSET (f))
1758 continue;
1759 field_offset = byte_position (f);
1760 if (TREE_CODE (field_offset) != INTEGER_CST)
1761 continue;
1762
1763 /* ??? Java creates "interesting" fields for representing base classes.
1764 They have no name, and have no context. With no context, we get into
1765 trouble with nonoverlapping_component_refs_p. Skip them. */
1766 if (!DECL_FIELD_CONTEXT (f))
1767 continue;
1768
1769 /* The previous array field isn't at the end. */
1770 tail_array_field = NULL_TREE;
1771
1772 /* Check to see if this offset overlaps with the field. */
1773 cmp = tree_int_cst_compare (field_offset, offset);
1774 if (cmp > 0)
1775 continue;
1776
1777 field_type = TREE_TYPE (f);
1778
1779 /* Here we exactly match the offset being checked. If the types match,
1780 then we can return that field. */
1781 if (cmp == 0
1782 && useless_type_conversion_p (orig_type, field_type))
1783 {
1784 if (base_is_ptr)
1785 base = build1 (INDIRECT_REF, record_type, base);
1786 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1787 return t;
1788 }
1789
1790 /* Don't care about offsets into the middle of scalars. */
1791 if (!AGGREGATE_TYPE_P (field_type))
1792 continue;
1793
1794 /* Check for an array at the end of the struct. This is often
1795 used for flexible array members. We should be able to
1796 turn this into an array access anyway. */
1797 if (TREE_CODE (field_type) == ARRAY_TYPE)
1798 tail_array_field = f;
1799
1800 /* Check the end of the field against the offset. */
1801 if (!DECL_SIZE_UNIT (f)
1802 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1803 continue;
1804 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1805 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1806 continue;
1807
1808 /* If we matched, then set offset to the displacement into
1809 this field. */
1810 if (base_is_ptr)
1811 new_base = build1 (INDIRECT_REF, record_type, base);
1812 else
1813 new_base = base;
1814 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1815
1816 /* Recurse to possibly find the match. */
1817 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1818 f == TYPE_FIELDS (record_type));
1819 if (ret)
1820 return ret;
1821 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1822 orig_type, false);
1823 if (ret)
1824 return ret;
1825 }
1826
1827 if (!tail_array_field)
1828 return NULL_TREE;
1829
1830 f = tail_array_field;
1831 field_type = TREE_TYPE (f);
1832 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1833
1834 /* If we get here, we've got an aggregate field, and a possibly
1835 nonzero offset into it. Recurse and hope for a valid match. */
1836 if (base_is_ptr)
1837 base = build1 (INDIRECT_REF, record_type, base);
1838 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1839
1840 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1841 f == TYPE_FIELDS (record_type));
1842 if (t)
1843 return t;
1844 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1845 orig_type, false);
1846}
1847
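/* Added illustration, not part of tree-ssa-ccp.c: the *(S+O) -> S.X shape
   targeted above, sketched at the source level. Adding a field's byte
   displacement to the record's address and dereferencing at the field type
   is what the folder turns back into a COMPONENT_REF (s.y here); offsetof
   is used only to keep the example portable. */
#include <stdio.h>
#include <stddef.h>

struct S { int x; int y; };

int
main (void)
{
  struct S s = { 1, 2 };
  int *p = (int *) ((char *) &s + offsetof (struct S, y));
  printf ("%d == %d\n", *p, s.y);   /* *(S+O) foldable to s.y */
  return 0;
}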
1848/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1849 or BASE[index] or by combination of those.
1850
1851 Before attempting the conversion strip off existing ADDR_EXPRs and
1852 handled component refs. */
1853
1854tree
1855maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1856{
1857 tree ret;
1858 tree type;
1859 bool base_is_ptr = true;
1860
1861 STRIP_NOPS (base);
1862 if (TREE_CODE (base) == ADDR_EXPR)
1863 {
1864 base_is_ptr = false;
1865
1866 base = TREE_OPERAND (base, 0);
1867
1868 /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
1869 field of a union, so it needs to be removed and a new COMPONENT_REF
1870 constructed. Such wrong COMPONENT_REFs are often constructed by
1871 folding the (type *)&object within the expression (type *)&object+offset. */
1872 if (handled_component_p (base))
1873 {
1874 HOST_WIDE_INT sub_offset, size, maxsize;
1875 tree newbase;
1876 newbase = get_ref_base_and_extent (base, &sub_offset,
1877 &size, &maxsize);
1878 gcc_assert (newbase);
1879 if (size == maxsize
1880 && size != -1
1881 && !(sub_offset & (BITS_PER_UNIT - 1)))
1882 {
1883 base = newbase;
1884 if (sub_offset)
1885 offset = int_const_binop (PLUS_EXPR, offset,
1886 build_int_cst (TREE_TYPE (offset),
1887 sub_offset / BITS_PER_UNIT), 1);
1888 }
1889 }
1890 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1891 && integer_zerop (offset))
1892 return base;
1893 type = TREE_TYPE (base);
1894 }
1895 else
1896 {
1897 base_is_ptr = true;
1898 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1899 return NULL_TREE;
1900 type = TREE_TYPE (TREE_TYPE (base));
1901 }
1902 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1903 orig_type, base_is_ptr);
1904 if (!ret)
1905 {
1906 if (base_is_ptr)
1907 base = build1 (INDIRECT_REF, type, base);
1908 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1909 }
1910 return ret;
1911}
1912
1913/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1914 or &BASE[index] or by combination of those.
1915
1916 Before attempting the conversion strip off existing component refs. */
1917
1918tree
1919maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1920{
1921 tree t;
1922
1923 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1924 && POINTER_TYPE_P (orig_type));
1925
1926 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1927 if (t != NULL_TREE)
1928 {
1929 tree orig = addr;
1930 tree ptr_type;
1931
1932 /* For __builtin_object_size to function correctly we need to
1933 make sure not to fold address arithmetic so that we change
1934 reference from one array to another. This would happen for
1935 example for
1936
1937 struct X { char s1[10]; char s2[10]; } s;
1938 char *foo (void) { return &s.s2[-4]; }
1939
1940 where we need to avoid generating &s.s1[6]. As the C and
1941 C++ frontends create different initial trees
1942 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1943 sophisticated comparisons here. Note that checking for the
1944 condition after the fact is easier than trying to avoid doing
1945 the folding. */
1946 STRIP_NOPS (orig);
1947 if (TREE_CODE (orig) == ADDR_EXPR)
1948 orig = TREE_OPERAND (orig, 0);
1949 if ((TREE_CODE (orig) == ARRAY_REF
1950 || (TREE_CODE (orig) == COMPONENT_REF
1951 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1952 && (TREE_CODE (t) == ARRAY_REF
1953 || (TREE_CODE (t) == COMPONENT_REF
1954 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
1955 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1956 ? TREE_OPERAND (orig, 0) : orig,
1957 TREE_CODE (t) == ARRAY_REF
1958 ? TREE_OPERAND (t, 0) : t, 0))
1959 return NULL_TREE;
1960
1961 ptr_type = build_pointer_type (TREE_TYPE (t));
1962 if (!useless_type_conversion_p (orig_type, ptr_type))
1963 return NULL_TREE;
1964 return build_fold_addr_expr_with_type (t, ptr_type);
1965 }
1966
1967 return NULL_TREE;
1968}
1969
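/* Added illustration, not part of tree-ssa-ccp.c: the layout from the
   comment above, showing why the fold must not hop between sibling arrays.
   The two addresses below are numerically equal (the negative index is
   used here exactly as in the comment, purely for illustration), but they
   name different members; rewriting &s.s2[-4] as &s.s1[6] would change
   what __builtin_object_size can report for bounds checks on s2. */
#include <stdio.h>

struct X { char s1[10]; char s2[10]; } s;

int
main (void)
{
  char *p = &s.s2[-4];   /* must not be folded to &s.s1[6] */
  char *q = &s.s1[6];
  printf ("%d\n", p == q);   /* prints 1: same address, different member */
  return 0;
}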
1970/* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1971 Return the simplified expression, or NULL if nothing could be done. */
1972
1973static tree
1974maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1975{
1976 tree t;
1977 bool volatile_p = TREE_THIS_VOLATILE (expr);
1978
1979 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1980 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1981 are sometimes added. */
1982 base = fold (base);
1983 STRIP_TYPE_NOPS (base);
1984 TREE_OPERAND (expr, 0) = base;
1985
1986 /* One possibility is that the address reduces to a string constant. */
1987 t = fold_read_from_constant_string (expr);
1988 if (t)
1989 return t;
1990
1991 /* Add in any offset from a POINTER_PLUS_EXPR. */
1992 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1993 {
1994 tree offset2;
1995
1996 offset2 = TREE_OPERAND (base, 1);
1997 if (TREE_CODE (offset2) != INTEGER_CST)
1998 return NULL_TREE;
1999 base = TREE_OPERAND (base, 0);
2000
2001 offset = fold_convert (sizetype,
2002 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2003 }
2004
2005 if (TREE_CODE (base) == ADDR_EXPR)
2006 {
2007 tree base_addr = base;
2008
2009 /* Strip the ADDR_EXPR. */
2010 base = TREE_OPERAND (base, 0);
2011
2012 /* Fold away CONST_DECL to its value, if the type is scalar. */
2013 if (TREE_CODE (base) == CONST_DECL
2014 && is_gimple_min_invariant (DECL_INITIAL (base)))
2015 return DECL_INITIAL (base);
2016
2017 /* Try folding *(&B+O) to B.X. */
2018 t = maybe_fold_offset_to_reference (base_addr, offset,
2019 TREE_TYPE (expr));
2020 if (t)
2021 {
2022 /* Preserve volatileness of the original expression.
2023 We can end up with a plain decl here which is shared
2024 and we shouldn't mess with its flags. */
2025 if (!SSA_VAR_P (t))
2026 TREE_THIS_VOLATILE (t) = volatile_p;
2027 return t;
2028 }
2029 }
2030 else
2031 {
2032 /* We can get here for out-of-range string constant accesses,
2033 such as "_"[3]. Bail out of the entire substitution search
2034 and arrange for the entire statement to be replaced by a
2035 call to __builtin_trap. In all likelihood this will all be
2036 constant-folded away, but in the meantime we can't leave with
2037 something that get_expr_operands can't understand. */
2038
2039 t = base;
2040 STRIP_NOPS (t);
2041 if (TREE_CODE (t) == ADDR_EXPR
2042 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2043 {
2044 /* FIXME: Except that this causes problems elsewhere with dead
2045 code not being deleted, and we die in the rtl expanders
2046 because we failed to remove some ssa_name. In the meantime,
2047 just return zero. */
2048 /* FIXME2: This condition should be signaled by
2049 fold_read_from_constant_string directly, rather than
2050 re-checking for it here. */
2051 return integer_zero_node;
2052 }
2053
2054 /* Try folding *(B+O) to B->X. Still an improvement. */
2055 if (POINTER_TYPE_P (TREE_TYPE (base)))
2056 {
2057 t = maybe_fold_offset_to_reference (base, offset,
2058 TREE_TYPE (expr));
2059 if (t)
2060 return t;
2061 }
2062 }
2063
2064 /* Otherwise we had an offset that we could not simplify. */
2065 return NULL_TREE;
2066}
2067
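/* Added illustration, not part of tree-ssa-ccp.c: two user-visible cases
   the indirect folding above covers. Once CCP propagates &v into p, *p
   becomes *&v and folds back to v; a read through the address of a string
   constant folds to the character itself. */
#include <stdio.h>

int
main (void)
{
  int v = 42;
  int *p = &v;
  int a = *p;            /* after propagation: *&v, folded to v */
  char c = "hello"[1];   /* constant-string read, folded to 'e' */
  printf ("%d %c\n", a, c);
  return 0;
}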
2068
2069/* A quaint feature extant in our address arithmetic is that there
2070 can be hidden type changes here. The type of the result need
2071 not be the same as the type of the input pointer.
2072
2073 What we're after here is an expression of the form
2074 (T *)(&array + const)
2075 where array is OP0, const is OP1, RES_TYPE is T and
2076 the cast doesn't actually exist, but is implicit in the
2077 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2078 &array[x]
2079 which may be able to propagate further. */
2080
2081tree
2082maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2083{
2084 tree ptd_type;
2085 tree t;
2086
2087 /* It had better be a constant. */
2088 if (TREE_CODE (op1) != INTEGER_CST)
2089 return NULL_TREE;
2090 /* The first operand should be an ADDR_EXPR. */
2091 if (TREE_CODE (op0) != ADDR_EXPR)
2092 return NULL_TREE;
2093 op0 = TREE_OPERAND (op0, 0);
2094
2095 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2096 the offset into it. */
2097 while (TREE_CODE (op0) == ARRAY_REF)
2098 {
2099 tree array_obj = TREE_OPERAND (op0, 0);
2100 tree array_idx = TREE_OPERAND (op0, 1);
2101 tree elt_type = TREE_TYPE (op0);
2102 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2103 tree min_idx;
2104
2105 if (TREE_CODE (array_idx) != INTEGER_CST)
2106 break;
2107 if (TREE_CODE (elt_size) != INTEGER_CST)
2108 break;
2109
2110 /* Un-bias the index by the min index of the array type. */
2111 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2112 if (min_idx)
2113 {
2114 min_idx = TYPE_MIN_VALUE (min_idx);
2115 if (min_idx)
2116 {
2117 if (TREE_CODE (min_idx) != INTEGER_CST)
2118 break;
2119
2120 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2121 if (!integer_zerop (min_idx))
2122 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2123 min_idx, 0);
2124 }
2125 }
2126
2127 /* Convert the index to a byte offset. */
2128 array_idx = fold_convert (sizetype, array_idx);
2129 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2130
2131 /* Update the operands for the next round, or for folding. */
2132 op1 = int_const_binop (PLUS_EXPR,
2133 array_idx, op1, 0);
2134 op0 = array_obj;
2135 }
2136
2137 ptd_type = TREE_TYPE (res_type);
2138 /* If we want a pointer to void, reconstruct the reference from the
2139 array element type. A pointer to that can be trivially converted
2140 to void *. This happens as we fold (void *)(ptr p+ off). */
2141 if (VOID_TYPE_P (ptd_type)
2142 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2143 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2144
2145 /* At which point we can try some of the same things as for indirects. */
2146 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2147 if (!t)
2148 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2149 ptd_type, false);
2150 if (t)
2151 t = build1 (ADDR_EXPR, res_type, t);
2152
2153 return t;
2154}
2155
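/* Added illustration, not part of tree-ssa-ccp.c: the (T *)(&array + const)
   shape described above. The byte offset divides evenly by the element
   size, so the address computation can become &a[3] and propagate further. */
#include <stdio.h>

int
main (void)
{
  static int a[8];
  int *p = (int *) ((char *) &a + 3 * sizeof (int));   /* foldable to &a[3] */
  printf ("%d\n", (int) (p - &a[0]));                  /* prints 3 */
  return 0;
}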
2156/* For passing state through walk_tree into fold_stmt_r and its
2157 children. */
2158
2159struct fold_stmt_r_data
2160{
2161 gimple stmt;
2162 bool *changed_p;
2163 bool *inside_addr_expr_p;
2164};
2165
2166/* Subroutine of fold_stmt called via walk_tree. We perform several
2167 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2168
2169static tree
2170fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2171{
2172 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2173 struct fold_stmt_r_data *fold_stmt_r_data;
2174 bool *inside_addr_expr_p;
2175 bool *changed_p;
6de9cd9a 2176 tree expr = *expr_p, t;
2177 bool volatile_p = TREE_THIS_VOLATILE (expr);
2178
2179 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2180 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2181 changed_p = fold_stmt_r_data->changed_p;
2182
2183 /* ??? It'd be nice if walk_tree had a pre-order option. */
2184 switch (TREE_CODE (expr))
2185 {
2186 case INDIRECT_REF:
2187 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2188 if (t)
2189 return t;
2190 *walk_subtrees = 0;
2191
2192 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2193 integer_zero_node);
2194 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2195 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2196 t = NULL_TREE;
2197 if (!t
2198 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2199 /* If we had a good reason for propagating the address here,
2200 make sure we end up with valid gimple. See PR34989. */
2201 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2202 break;
2203
2204 case NOP_EXPR:
2205 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2206 if (t)
2207 return t;
2208 *walk_subtrees = 0;
2209
2210 if (POINTER_TYPE_P (TREE_TYPE (expr))
2211 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2212 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2213 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2214 integer_zero_node,
2215 TREE_TYPE (TREE_TYPE (expr)))))
2216 return t;
2217 break;
2218
2219 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2220 We'd only want to bother decomposing an existing ARRAY_REF if
2221 the base array is found to have another offset contained within.
2222 Otherwise we'd be wasting time. */
2223 case ARRAY_REF:
2224 /* If we are not processing expressions found within an
2225 ADDR_EXPR, then we can fold constant array references.
2226 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2227 into 'a' = 5. */
2228 if (!*inside_addr_expr_p && !wi->is_lhs)
2229 t = fold_read_from_constant_string (expr);
2230 else
2231 t = NULL;
2232 break;
2233
2234 case ADDR_EXPR:
2235 *inside_addr_expr_p = true;
2236 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2237 *inside_addr_expr_p = false;
2238 if (t)
2239 return t;
2240 *walk_subtrees = 0;
2241
2242 /* Make sure the value is properly considered constant, and so gets
2243 propagated as expected. */
2244 if (*changed_p)
2245 recompute_tree_invariant_for_addr_expr (expr);
2246 return NULL_TREE;
2247
2248 case COMPONENT_REF:
2249 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2250 if (t)
2251 return t;
2252 *walk_subtrees = 0;
2253
2254 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2255 We've already checked that the records are compatible, so we should
2256 come up with a set of compatible fields. */
2257 {
2258 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2259 tree expr_field = TREE_OPERAND (expr, 1);
2260
2261 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2262 {
2263 expr_field = find_compatible_field (expr_record, expr_field);
2264 TREE_OPERAND (expr, 1) = expr_field;
2265 }
2266 }
2267 break;
2268
2269 case TARGET_MEM_REF:
2270 t = maybe_fold_tmr (expr);
2271 break;
2272
2273 case POINTER_PLUS_EXPR:
2274 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2275 if (t)
2276 return t;
2277 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2278 if (t)
2279 return t;
2280 *walk_subtrees = 0;
2281
2282 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2283 TREE_OPERAND (expr, 0),
2284 TREE_OPERAND (expr, 1));
2285 break;
2286
2287 case COND_EXPR:
2288 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2289 {
2290 tree op0 = TREE_OPERAND (expr, 0);
2291 tree tem;
2292 bool set;
2293
2294 fold_defer_overflow_warnings ();
2295 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2296 TREE_OPERAND (op0, 0),
2297 TREE_OPERAND (op0, 1));
2298 /* This is actually a conditional expression, not a GIMPLE
2299 conditional statement; however, the valid_gimple_rhs_p
2300 test still applies. */
2301 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2302 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2303 if (set)
2304 {
2305 COND_EXPR_COND (expr) = tem;
2306 t = expr;
2307 break;
2308 }
2309 }
2310 return NULL_TREE;
2311
2312 default:
2313 return NULL_TREE;
2314 }
2315
2316 if (t)
2317 {
2318 /* Preserve volatileness of the original expression.
2319 We can end up with a plain decl here which is shared
2320 and we shouldn't mess with its flags. */
2321 if (!SSA_VAR_P (t))
2322 TREE_THIS_VOLATILE (t) = volatile_p;
2323 *expr_p = t;
2324 *changed_p = true;
2325 }
2326
2327 return NULL_TREE;
2328}
2329
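/* Added illustration, not part of tree-ssa-ccp.c: source-level forms that
   the statement walk above simplifies. An INDIRECT_REF of an ADDR_EXPR
   (*&x) folds to the variable itself, and a constant ARRAY_REF into a
   string literal folds to a character as long as it is not a store. */
#include <stdio.h>

int
main (void)
{
  int x = 7;
  *&x = 9;             /* folds to x = 9 */
  char c = "abc"[2];   /* folds to 'c'; "abc"[2] = 'x' would not be folded */
  printf ("%d %c\n", x, c);
  return 0;
}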
2330/* Return the string length, maximum string length or maximum value of
2331 ARG in LENGTH.
2332 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2333 is not NULL and, for TYPE == 0, its value is not equal to the length
2334 we determine or if we are unable to determine the length or value,
2335 return false. VISITED is a bitmap of visited variables.
2336 TYPE is 0 if string length should be returned, 1 for maximum string
2337 length and 2 for maximum value ARG can have. */
2338
2339static bool
2340get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2341{
2342 tree var, val;
2343 gimple def_stmt;
2344
2345 if (TREE_CODE (arg) != SSA_NAME)
2346 {
2347 if (TREE_CODE (arg) == COND_EXPR)
2348 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2349 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2350 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2351 else if (TREE_CODE (arg) == ADDR_EXPR
2352 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2353 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2354 {
2355 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2356 if (TREE_CODE (aop0) == INDIRECT_REF
2357 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2358 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2359 length, visited, type);
2360 }
2361
2362 if (type == 2)
2363 {
2364 val = arg;
2365 if (TREE_CODE (val) != INTEGER_CST
2366 || tree_int_cst_sgn (val) < 0)
2367 return false;
2368 }
2369 else
2370 val = c_strlen (arg, 1);
2371 if (!val)
2372 return false;
2373
2374 if (*length)
2375 {
2376 if (type > 0)
2377 {
2378 if (TREE_CODE (*length) != INTEGER_CST
2379 || TREE_CODE (val) != INTEGER_CST)
2380 return false;
2381
2382 if (tree_int_cst_lt (*length, val))
2383 *length = val;
2384 return true;
2385 }
2386 else if (simple_cst_equal (val, *length) != 1)
2387 return false;
2388 }
2389
2390 *length = val;
2391 return true;
2392 }
2393
2394 /* If we were already here, break the infinite cycle. */
2395 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2396 return true;
2397 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2398
2399 var = arg;
2400 def_stmt = SSA_NAME_DEF_STMT (var);
2401
2402 switch (gimple_code (def_stmt))
2403 {
2404 case GIMPLE_ASSIGN:
2405 /* The RHS of the statement defining VAR must either have a
2406 constant length or come from another SSA_NAME with a constant
2407 length. */
2408 if (gimple_assign_single_p (def_stmt)
2409 || gimple_assign_unary_nop_p (def_stmt))
2410 {
2411 tree rhs = gimple_assign_rhs1 (def_stmt);
2412 return get_maxval_strlen (rhs, length, visited, type);
2413 }
2414 return false;
2415
2416 case GIMPLE_PHI:
2417 {
2418 /* All the arguments of the PHI node must have the same constant
2419 length. */
2420 unsigned i;
2421
2422 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2423 {
2424 tree arg = gimple_phi_arg (def_stmt, i)->def;
2425
2426 /* If this PHI has itself as an argument, we cannot
2427 determine the string length of this argument. However,
2428 if we can find a constant string length for the other
2429 PHI args then we can still be sure that this is a
2430 constant string length. So be optimistic and just
2431 continue with the next argument. */
2432 if (arg == gimple_phi_result (def_stmt))
2433 continue;
2434
2435 if (!get_maxval_strlen (arg, length, visited, type))
2436 return false;
2437 }
2438 }
2439 return true;
2440
2441 default:
2442 return false;
2443 }
2444}
2445
2446
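/* Added illustration, not part of tree-ssa-ccp.c: why the PHI case above
   pays off. Both values that can reach s are literals of length 5, so a
   later strlen (s) can be replaced by the constant 5 even though the
   particular string chosen at run time is unknown. */
#include <stdio.h>
#include <string.h>

int
main (int argc, char **argv)
{
  const char *s = argc > 1 ? "hello" : "world";   /* PHI of two constants */
  printf ("%zu\n", strlen (s));                   /* foldable to 5 */
  return 0;
}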
2447/* Fold builtin call in statement STMT. Returns a simplified tree.
2448 We may return a non-constant expression, including another call
2449 to a different function and with different arguments, e.g.,
2450 substituting memcpy for strcpy when the string length is known.
2451 Note that some builtins expand into inline code that may not
2452 be valid in GIMPLE. Callers must take care. */
2453
2454static tree
2455ccp_fold_builtin (gimple stmt)
2456{
2457 tree result, val[3];
2458 tree callee, a;
2459 int arg_idx, type;
2460 bitmap visited;
2461 bool ignore;
2462 int nargs;
2463
2464 gcc_assert (is_gimple_call (stmt));
2465
2466 ignore = (gimple_call_lhs (stmt) == NULL);
2467
2468 /* First try the generic builtin folder. If that succeeds, return the
2469 result directly. */
2470 result = fold_call_stmt (stmt, ignore);
2471 if (result)
2472 {
2473 if (ignore)
2474 STRIP_NOPS (result);
2475 return result;
2476 }
2477
2478 /* Ignore MD builtins. */
2479 callee = gimple_call_fndecl (stmt);
2480 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2481 return NULL_TREE;
2482
2483 /* If the builtin could not be folded, and it has no argument list,
2484 we're done. */
2485 nargs = gimple_call_num_args (stmt);
2486 if (nargs == 0)
2487 return NULL_TREE;
2488
2489 /* Limit the work only for builtins we know how to simplify. */
2490 switch (DECL_FUNCTION_CODE (callee))
2491 {
2492 case BUILT_IN_STRLEN:
2493 case BUILT_IN_FPUTS:
2494 case BUILT_IN_FPUTS_UNLOCKED:
2495 arg_idx = 0;
2496 type = 0;
2497 break;
2498 case BUILT_IN_STRCPY:
2499 case BUILT_IN_STRNCPY:
2500 arg_idx = 1;
2501 type = 0;
2502 break;
2503 case BUILT_IN_MEMCPY_CHK:
2504 case BUILT_IN_MEMPCPY_CHK:
2505 case BUILT_IN_MEMMOVE_CHK:
2506 case BUILT_IN_MEMSET_CHK:
2507 case BUILT_IN_STRNCPY_CHK:
2508 arg_idx = 2;
2509 type = 2;
2510 break;
2511 case BUILT_IN_STRCPY_CHK:
2512 case BUILT_IN_STPCPY_CHK:
2513 arg_idx = 1;
2514 type = 1;
2515 break;
2516 case BUILT_IN_SNPRINTF_CHK:
2517 case BUILT_IN_VSNPRINTF_CHK:
2518 arg_idx = 1;
2519 type = 2;
2520 break;
2521 default:
2522 return NULL_TREE;
2523 }
2524
2525 if (arg_idx >= nargs)
2526 return NULL_TREE;
2527
2528 /* Try to use the dataflow information gathered by the CCP process. */
2529 visited = BITMAP_ALLOC (NULL);
2530 bitmap_clear (visited);
2531
2532 memset (val, 0, sizeof (val));
2533 a = gimple_call_arg (stmt, arg_idx);
2534 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2535 val[arg_idx] = NULL_TREE;
2536
2537 BITMAP_FREE (visited);
2538
2539 result = NULL_TREE;
2540 switch (DECL_FUNCTION_CODE (callee))
2541 {
2542 case BUILT_IN_STRLEN:
2543 if (val[0] && nargs == 1)
2544 {
2545 tree new_val =
2546 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2547
2548 /* If the result is not a valid gimple value, or not a cast
2549 of a valid gimple value, then we cannot use the result. */
2550 if (is_gimple_val (new_val)
2551 || (is_gimple_cast (new_val)
2552 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2553 return new_val;
2554 }
2555 break;
2556
2557 case BUILT_IN_STRCPY:
2558 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2559 result = fold_builtin_strcpy (callee,
2560 gimple_call_arg (stmt, 0),
2561 gimple_call_arg (stmt, 1),
2562 val[1]);
2563 break;
2564
2565 case BUILT_IN_STRNCPY:
2566 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2567 result = fold_builtin_strncpy (callee,
2568 gimple_call_arg (stmt, 0),
2569 gimple_call_arg (stmt, 1),
2570 gimple_call_arg (stmt, 2),
2571 val[1]);
2572 break;
2573
2574 case BUILT_IN_FPUTS:
2575 if (nargs == 2)
2576 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2577 gimple_call_arg (stmt, 1),
2578 ignore, false, val[0]);
2579 break;
2580
2581 case BUILT_IN_FPUTS_UNLOCKED:
2582 if (nargs == 2)
2583 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2584 gimple_call_arg (stmt, 1),
2585 ignore, true, val[0]);
2586 break;
2587
2588 case BUILT_IN_MEMCPY_CHK:
2589 case BUILT_IN_MEMPCPY_CHK:
2590 case BUILT_IN_MEMMOVE_CHK:
2591 case BUILT_IN_MEMSET_CHK:
2592 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2593 result = fold_builtin_memory_chk (callee,
2594 gimple_call_arg (stmt, 0),
2595 gimple_call_arg (stmt, 1),
2596 gimple_call_arg (stmt, 2),
2597 gimple_call_arg (stmt, 3),
2598 val[2], ignore,
2599 DECL_FUNCTION_CODE (callee));
2600 break;
2601
2602 case BUILT_IN_STRCPY_CHK:
2603 case BUILT_IN_STPCPY_CHK:
2604 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2605 result = fold_builtin_stxcpy_chk (callee,
2606 gimple_call_arg (stmt, 0),
2607 gimple_call_arg (stmt, 1),
2608 gimple_call_arg (stmt, 2),
2609 val[1], ignore,
2610 DECL_FUNCTION_CODE (callee));
2611 break;
2612
2613 case BUILT_IN_STRNCPY_CHK:
2614 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2615 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2616 gimple_call_arg (stmt, 1),
2617 gimple_call_arg (stmt, 2),
2618 gimple_call_arg (stmt, 3),
2619 val[2]);
2620 break;
2621
2622 case BUILT_IN_SNPRINTF_CHK:
2623 case BUILT_IN_VSNPRINTF_CHK:
2624 if (val[1] && is_gimple_val (val[1]))
2625 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2626 DECL_FUNCTION_CODE (callee));
2627 break;
2628
2629 default:
2630 gcc_unreachable ();
2631 }
2632
2633 if (result && ignore)
2634 result = fold_ignored_result (result);
2635 return result;
2636}
2637
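/* Added illustration, not part of tree-ssa-ccp.c: rewrites of the kind the
   builtin folder above enables once string lengths are known. With the
   length of "abc" established, strlen collapses to a constant and strcpy
   can become a fixed-size memcpy. */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[16];
  strcpy (buf, "abc");               /* foldable to memcpy (buf, "abc", 4) */
  printf ("%zu\n", strlen ("abc"));  /* foldable to the constant 3 */
  puts (buf);
  return 0;
}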
2638/* Attempt to fold an assignment statement pointed-to by SI. Returns a
2639 replacement rhs for the statement or NULL_TREE if no simplification
2640 could be made. It is assumed that the operands have been previously
2641 folded. */
2642
2643static tree
2644fold_gimple_assign (gimple_stmt_iterator *si)
2645{
2646 gimple stmt = gsi_stmt (*si);
2647 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2648
2649 tree result = NULL;
2650
2651 switch (get_gimple_rhs_class (subcode))
2652 {
2653 case GIMPLE_SINGLE_RHS:
2654 {
2655 tree rhs = gimple_assign_rhs1 (stmt);
2656
2657 /* Try to fold a conditional expression. */
2658 if (TREE_CODE (rhs) == COND_EXPR)
2659 {
2660 tree temp = fold (COND_EXPR_COND (rhs));
2661 if (temp != COND_EXPR_COND (rhs))
2662 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2663 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2664 }
2665
2666 /* If we couldn't fold the RHS, hand over to the generic
2667 fold routines. */
2668 if (result == NULL_TREE)
2669 result = fold (rhs);
2670
2671 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2672 that may have been added by fold, and "useless" type
2673 conversions that might now be apparent due to propagation. */
2674 STRIP_USELESS_TYPE_CONVERSION (result);
2675
2676 if (result != rhs && valid_gimple_rhs_p (result))
2677 return result;
2678 else
2679 /* It is possible that fold_stmt_r simplified the RHS.
2680 Make sure that the subcode of this statement still
2681 reflects the principal operator of the rhs operand. */
2682 return rhs;
2683 }
2684 break;
2685
2686 case GIMPLE_UNARY_RHS:
2687 {
2688 tree rhs = gimple_assign_rhs1 (stmt);
2689
2690 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2691 if (result)
2692 {
2693 /* If the operation was a conversion do _not_ mark a
2694 resulting constant with TREE_OVERFLOW if the original
2695 constant was not. These conversions have implementation
2696 defined behavior and retaining the TREE_OVERFLOW flag
2697 here would confuse later passes such as VRP. */
2698 if (CONVERT_EXPR_CODE_P (subcode)
2699 && TREE_CODE (result) == INTEGER_CST
2700 && TREE_CODE (rhs) == INTEGER_CST)
2701 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2702
2703 STRIP_USELESS_TYPE_CONVERSION (result);
2704 if (valid_gimple_rhs_p (result))
2705 return result;
2706 }
2707 else if (CONVERT_EXPR_CODE_P (subcode)
2708 && POINTER_TYPE_P (gimple_expr_type (stmt))
2709 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2710 {
2711 tree type = gimple_expr_type (stmt);
2712 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2713 integer_zero_node, type);
2714 if (t)
2715 return t;
2716 }
2717 }
2718 break;
2719
2720 case GIMPLE_BINARY_RHS:
2721 /* Try to fold pointer addition. */
2722 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2723 {
2724 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2725 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2726 {
2727 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2728 if (!useless_type_conversion_p
2729 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2730 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2731 }
2732 result = maybe_fold_stmt_addition (type,
2733 gimple_assign_rhs1 (stmt),
2734 gimple_assign_rhs2 (stmt));
2735 }
2736
2737 if (!result)
2738 result = fold_binary (subcode,
2739 TREE_TYPE (gimple_assign_lhs (stmt)),
2740 gimple_assign_rhs1 (stmt),
2741 gimple_assign_rhs2 (stmt));
2742
2743 if (result)
2744 {
2745 STRIP_USELESS_TYPE_CONVERSION (result);
2746 if (valid_gimple_rhs_p (result))
2747 return result;
2748
2749 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2750 we lose canonicalization opportunities. Do not go again
2751 through fold here though, or the same non-GIMPLE will be
2752 produced. */
2753 if (commutative_tree_code (subcode)
2754 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2755 gimple_assign_rhs2 (stmt), false))
2756 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2757 gimple_assign_rhs2 (stmt),
2758 gimple_assign_rhs1 (stmt));
2759 }
2760 break;
2761
2762 case GIMPLE_INVALID_RHS:
2763 gcc_unreachable ();
2764 }
2765
2766 return NULL_TREE;
2767}
2768
2769/* Attempt to fold a conditional statement. Return true if any changes were
2770 made. We only attempt to fold the condition expression, and do not perform
2771 any transformation that would require alteration of the cfg. It is
2772 assumed that the operands have been previously folded. */
2773
2774static bool
2775fold_gimple_cond (gimple stmt)
2776{
2777 tree result = fold_binary (gimple_cond_code (stmt),
2778 boolean_type_node,
2779 gimple_cond_lhs (stmt),
2780 gimple_cond_rhs (stmt));
2781
2782 if (result)
2783 {
2784 STRIP_USELESS_TYPE_CONVERSION (result);
2785 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2786 {
2787 gimple_cond_set_condition_from_tree (stmt, result);
2788 return true;
2789 }
2790 }
2791
2792 return false;
2793}
2794
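/* Added illustration, not part of tree-ssa-ccp.c: the effect of folding a
   GIMPLE_COND once constants have been propagated into it. The comparison
   below reduces to a constant, leaving one arm dead for later cleanup. */
#include <stdio.h>

int
main (void)
{
  int n = 4;        /* CCP propagates 4 into the condition */
  if (n != 0)       /* folds to a constant-true condition */
    printf ("taken\n");
  else
    printf ("dead branch\n");
  return 0;
}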
2795
2796/* Attempt to fold a call statement referenced by the statement iterator GSI.
2797 The statement may be replaced by another statement, e.g., if the call
2798 simplifies to a constant value. Return true if any changes were made.
2799 It is assumed that the operands have been previously folded. */
2800
2801static bool
2802fold_gimple_call (gimple_stmt_iterator *gsi)
2803{
2804 gimple stmt = gsi_stmt (*gsi);
2805
2806 tree callee = gimple_call_fndecl (stmt);
2807
2808 /* Check for builtins that CCP can handle using information not
2809 available in the generic fold routines. */
2810 if (callee && DECL_BUILT_IN (callee))
2811 {
2812 tree result = ccp_fold_builtin (stmt);
2813
2814 if (result)
2815 return update_call_from_tree (gsi, result);
2816 }
2817 else
2818 {
2819 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2820 here are when we've propagated the address of a decl into the
2821 object slot. */
2822 /* ??? Should perhaps do this in fold proper. However, doing it
2823 there requires that we create a new CALL_EXPR, and that requires
2824 copying EH region info to the new node. Easier to just do it
2825 here where we can just smash the call operand. */
2826 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2827 callee = gimple_call_fn (stmt);
2828 if (TREE_CODE (callee) == OBJ_TYPE_REF
2829 && lang_hooks.fold_obj_type_ref
2830 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2831 && DECL_P (TREE_OPERAND
2832 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2833 {
2834 tree t;
2835
2836 /* ??? Caution: Broken ADDR_EXPR semantics means that
2837 looking at the type of the operand of the addr_expr
2838 can yield an array type. See silly exception in
2839 check_pointer_types_r. */
2840 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2841 t = lang_hooks.fold_obj_type_ref (callee, t);
2842 if (t)
2843 {
2844 gimple_call_set_fn (stmt, t);
2845 return true;
2846 }
2847 }
2848 }
2849
2850 return false;
2851}
2852
2853/* Fold the statement pointed to by GSI. In some cases, this function may
2854 replace the whole statement with a new one. Returns true iff folding
2855 makes any changes. */
6de9cd9a 2856
2857bool
2858fold_stmt (gimple_stmt_iterator *gsi)
2859{
2860 tree res;
2861 struct fold_stmt_r_data fold_stmt_r_data;
2862 struct walk_stmt_info wi;
2863
2864 bool changed = false;
2865 bool inside_addr_expr = false;
2866
2867 gimple stmt = gsi_stmt (*gsi);
2868
2869 fold_stmt_r_data.stmt = stmt;
2870 fold_stmt_r_data.changed_p = &changed;
2871 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2872
2873 memset (&wi, 0, sizeof (wi));
2874 wi.info = &fold_stmt_r_data;
2875
2876 /* Fold the individual operands.
2877 For example, fold instances of *&VAR into VAR, etc. */
2878 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2879 gcc_assert (!res);
2880
2881 /* Fold the main computation performed by the statement. */
2882 switch (gimple_code (stmt))
2883 {
2884 case GIMPLE_ASSIGN:
2885 {
2886 tree new_rhs = fold_gimple_assign (gsi);
2887 if (new_rhs != NULL_TREE)
2888 {
2889 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2890 changed = true;
2891 }
2892 stmt = gsi_stmt (*gsi);
2893 break;
2894 }
2895 case GIMPLE_COND:
2896 changed |= fold_gimple_cond (stmt);
2897 break;
2898 case GIMPLE_CALL:
2899 /* The entire statement may be replaced in this case. */
2900 changed |= fold_gimple_call (gsi);
2901 break;
2902
2903 default:
2904 return changed;
2905 break;
2906 }
2907
2908 return changed;
2909}
2910
2911/* Perform the minimal folding on statement STMT. Only operations like
2912 *&x created by constant propagation are handled. The statement cannot
2913 be replaced with a new one. Return true if the statement was
2914 changed, false otherwise. */
2915
2916bool
2917fold_stmt_inplace (gimple stmt)
2918{
2919 tree res;
2920 struct fold_stmt_r_data fold_stmt_r_data;
2921 struct walk_stmt_info wi;
2922 gimple_stmt_iterator si;
2923
2924 bool changed = false;
2925 bool inside_addr_expr = false;
2926
2927 fold_stmt_r_data.stmt = stmt;
2928 fold_stmt_r_data.changed_p = &changed;
2929 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2930
2931 memset (&wi, 0, sizeof (wi));
2932 wi.info = &fold_stmt_r_data;
2933
2934 /* Fold the individual operands.
2935 For example, fold instances of *&VAR into VAR, etc.
2936
2937 It appears that, at one time, maybe_fold_stmt_indirect
2938 would cause the walk to return non-null in order to
2939 signal that the entire statement should be replaced with
2940 a call to __builtin_trap. This functionality is currently
2941 disabled, as noted in a FIXME, and cannot be supported here. */
2942 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2943 gcc_assert (!res);
2944
2945 /* Fold the main computation performed by the statement. */
2946 switch (gimple_code (stmt))
2947 {
2948 case GIMPLE_ASSIGN:
2949 {
2950 unsigned old_num_ops;
2951 tree new_rhs;
2952 old_num_ops = gimple_num_ops (stmt);
2953 si = gsi_for_stmt (stmt);
2954 new_rhs = fold_gimple_assign (&si);
2955 if (new_rhs != NULL_TREE
2956 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2957 {
2958 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2959 changed = true;
2960 }
2961 gcc_assert (gsi_stmt (si) == stmt);
2962 break;
2963 }
2964 case GIMPLE_COND:
2965 changed |= fold_gimple_cond (stmt);
2966 break;
2967
2968 default:
2969 break;
2970 }
2971
2972 return changed;
2973}
2974
2975/* Try to optimize out __builtin_stack_restore. Optimize it out
2976 if there is another __builtin_stack_restore in the same basic
2977 block and no calls or ASM_EXPRs are in between, or if this block's
2978 only outgoing edge is to EXIT_BLOCK and there are no calls or
2979 ASM_EXPRs after this __builtin_stack_restore. */
2980
2981static tree
2982optimize_stack_restore (gimple_stmt_iterator i)
2983{
2984 tree callee, rhs;
2985 gimple stmt, stack_save;
2986 gimple_stmt_iterator stack_save_gsi;
2987
2988 basic_block bb = gsi_bb (i);
2989 gimple call = gsi_stmt (i);
2990
2991 if (gimple_code (call) != GIMPLE_CALL
2992 || gimple_call_num_args (call) != 1
2993 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2994 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
cb8e078d
JJ
2995 return NULL_TREE;
2996
726a989a 2997 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
cb8e078d 2998 {
726a989a
RB
2999 stmt = gsi_stmt (i);
3000 if (gimple_code (stmt) == GIMPLE_ASM)
cb8e078d 3001 return NULL_TREE;
726a989a 3002 if (gimple_code (stmt) != GIMPLE_CALL)
cb8e078d
JJ
3003 continue;
3004
726a989a 3005 callee = gimple_call_fndecl (stmt);
cb8e078d
JJ
3006 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3007 return NULL_TREE;
3008
3009 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3010 break;
3011 }
3012
3013 if (gsi_end_p (i)
3014 && (! single_succ_p (bb)
3015 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3016 return NULL_TREE;
3017
3018 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3019 if (gimple_code (stack_save) != GIMPLE_CALL
3020 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3021 || stmt_could_throw_p (stack_save)
3022 || !has_single_use (gimple_call_arg (call, 0)))
3023 return NULL_TREE;
3024
3025 callee = gimple_call_fndecl (stack_save);
3026 if (!callee
3027 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3028 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3029 || gimple_call_num_args (stack_save) != 0)
3030 return NULL_TREE;
3031
3032 stack_save_gsi = gsi_for_stmt (stack_save);
3033 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3034 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3035 if (!update_call_from_tree (&stack_save_gsi, rhs))
cb8e078d 3036 {
726a989a 3037 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
cb8e078d
JJ
3038 return NULL_TREE;
3039 }
3040 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3041
3042 /* No effect, so the statement will be deleted. */
3043 return integer_zero_node;
3044}
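/* Added illustration, not part of tree-ssa-ccp.c: where the save/restore
   pairs handled above come from. A block containing a variable-length
   array is bracketed by implicit __builtin_stack_save/__builtin_stack_restore
   calls; when the function then runs straight to its exit with no further
   call or asm, the restore is redundant and can be optimized out. */
#include <stdio.h>

static int
sum (int n)
{
  int total = 0;
  {
    int vla[n];                /* implicit __builtin_stack_save */
    for (int i = 0; i < n; i++)
      {
        vla[i] = i;
        total += vla[i];
      }
  }                            /* implicit __builtin_stack_restore */
  return total;                /* falls through to exit: restore removable */
}

int
main (void)
{
  printf ("%d\n", sum (4));    /* prints 6 */
  return 0;
}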
3045
3046/* If the va_list type is a simple pointer and nothing special is needed,
3047 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3048 fold __builtin_va_end (&ap) out as a NOP and turn __builtin_va_copy
3049 into a simple pointer assignment. */
3050
3051static tree
3052optimize_stdarg_builtin (gimple call)
3053{
3054 tree callee, lhs, rhs, cfun_va_list;
3055 bool va_list_simple_ptr;
3056
3057 if (gimple_code (call) != GIMPLE_CALL)
3058 return NULL_TREE;
3059
3060 callee = gimple_call_fndecl (call);
3061
3062 cfun_va_list = targetm.fn_abi_va_list (callee);
3063 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3064 && (TREE_TYPE (cfun_va_list) == void_type_node
3065 || TREE_TYPE (cfun_va_list) == char_type_node);
3066
3067 switch (DECL_FUNCTION_CODE (callee))
3068 {
3069 case BUILT_IN_VA_START:
3070 if (!va_list_simple_ptr
3071 || targetm.expand_builtin_va_start != NULL
3072 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3073 return NULL_TREE;
3074
3075 if (gimple_call_num_args (call) != 2)
3076 return NULL_TREE;
3077
3078 lhs = gimple_call_arg (call, 0);
3079 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3080 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3081 != TYPE_MAIN_VARIANT (cfun_va_list))
3082 return NULL_TREE;
3083
3084 lhs = build_fold_indirect_ref (lhs);
3085 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3086 1, integer_zero_node);
3087 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3088 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3089
3090 case BUILT_IN_VA_COPY:
3091 if (!va_list_simple_ptr)
3092 return NULL_TREE;
3093
3094 if (gimple_call_num_args (call) != 2)
3095 return NULL_TREE;
3096
3097 lhs = gimple_call_arg (call, 0);
3098 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3099 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3100 != TYPE_MAIN_VARIANT (cfun_va_list))
3101 return NULL_TREE;
3102
3103 lhs = build_fold_indirect_ref (lhs);
3104 rhs = gimple_call_arg (call, 1);
3105 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3106 != TYPE_MAIN_VARIANT (cfun_va_list))
3107 return NULL_TREE;
3108
3109 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3110 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3111
3112 case BUILT_IN_VA_END:
3113 /* No effect, so the statement will be deleted. */
3114 return integer_zero_node;
3115
3116 default:
3117 gcc_unreachable ();
3118 }
3119}
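/* Added illustration, not part of tree-ssa-ccp.c: the user-level builtins
   rewritten above, on targets where va_list is a plain pointer. There,
   va_start becomes an assignment from __builtin_next_arg, va_copy becomes
   a pointer copy, and va_end folds away entirely. */
#include <stdarg.h>
#include <stdio.h>

static int
sum (int count, ...)
{
  va_list ap, aq;
  int total = 0;
  va_start (ap, count);   /* may become ap = __builtin_next_arg (0) */
  va_copy (aq, ap);       /* may become a simple pointer assignment */
  while (count-- > 0)
    total += va_arg (aq, int);
  va_end (aq);            /* folded out as a no-op */
  va_end (ap);
  return total;
}

int
main (void)
{
  printf ("%d\n", sum (3, 1, 2, 3));   /* prints 6 */
  return 0;
}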
3120
3121/* Convert EXPR into a GIMPLE value suitable for substitution on the
3122 RHS of an assignment. Insert the necessary statements before
3123 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3124 is replaced. If the call is expected to produce a result, then it
3125 is replaced by an assignment of the new RHS to the result variable.
3126 If the result is to be ignored, then the call is replaced by a
3127 GIMPLE_NOP. */
3128
3129static void
3130gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3131{
3132 tree lhs;
3133 tree tmp = NULL_TREE; /* Silence warning. */
3134 gimple stmt, new_stmt;
3135 gimple_stmt_iterator i;
3136 gimple_seq stmts = gimple_seq_alloc();
3137 struct gimplify_ctx gctx;
3138
3139 stmt = gsi_stmt (*si_p);
3140
3141 gcc_assert (is_gimple_call (stmt));
3142
3143 lhs = gimple_call_lhs (stmt);
3144
3145 push_gimplify_context (&gctx);
3146
3147 if (lhs == NULL_TREE)
3148 gimplify_and_add (expr, &stmts);
3149 else
3150 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3151
3152 pop_gimplify_context (NULL);
3153
3154 if (gimple_has_location (stmt))
3155 annotate_all_with_location (stmts, gimple_location (stmt));
3156
b28b1600 3157 /* The replacement can expose previously unreferenced variables. */
3158 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3159 {
3160 new_stmt = gsi_stmt (i);
3161 find_new_referenced_vars (new_stmt);
3162 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3163 mark_symbols_for_renaming (new_stmt);
3164 gsi_next (si_p);
3165 }
3166
3167 if (lhs == NULL_TREE)
3168 new_stmt = gimple_build_nop ();
3169 else
3170 {
3171 new_stmt = gimple_build_assign (lhs, tmp);
3172 copy_virtual_operands (new_stmt, stmt);
3173 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3174 }
3175
3176 gimple_set_location (new_stmt, gimple_location (stmt));
3177 gsi_replace (si_p, new_stmt, false);
b28b1600
JJ
3178}
3179
3180/* A simple pass that attempts to fold all builtin functions. This pass
3181 is run after we've propagated as many constants as we can. */
3182
3183static unsigned int
3184execute_fold_all_builtins (void)
3185{
3186 bool cfg_changed = false;
3187 basic_block bb;
3188 unsigned int todoflags = 0;
3189
3190 FOR_EACH_BB (bb)
3191 {
3192 gimple_stmt_iterator i;
3193 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3194 {
3195 gimple stmt, old_stmt;
3196 tree callee, result;
3197 enum built_in_function fcode;
3198
3199 stmt = gsi_stmt (i);
3200
3201 if (gimple_code (stmt) != GIMPLE_CALL)
3202 {
3203 gsi_next (&i);
3204 continue;
3205 }
3206 callee = gimple_call_fndecl (stmt);
3207 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3208 {
3209 gsi_next (&i);
3210 continue;
3211 }
3212 fcode = DECL_FUNCTION_CODE (callee);
3213
3214 result = ccp_fold_builtin (stmt);
3215
3216 if (result)
3217 gimple_remove_stmt_histograms (cfun, stmt);
3218
3219 if (!result)
3220 switch (DECL_FUNCTION_CODE (callee))
3221 {
3222 case BUILT_IN_CONSTANT_P:
3223 /* Resolve __builtin_constant_p. If it hasn't been
3224 folded to integer_one_node by now, it's fairly
3225 certain that the value simply isn't constant. */
3226 result = integer_zero_node;
3227 break;
3228
3229 case BUILT_IN_STACK_RESTORE:
3230 result = optimize_stack_restore (i);
3231 if (result)
3232 break;
3233 gsi_next (&i);
3234 continue;
3235
3236 case BUILT_IN_VA_START:
3237 case BUILT_IN_VA_END:
3238 case BUILT_IN_VA_COPY:
3239 /* These shouldn't be folded before pass_stdarg. */
3240 result = optimize_stdarg_builtin (stmt);
3241 if (result)
3242 break;
3243 /* FALLTHRU */
3244
3245 default:
3246 gsi_next (&i);
3247 continue;
3248 }
3249
3250 if (dump_file && (dump_flags & TDF_DETAILS))
3251 {
3252 fprintf (dump_file, "Simplified\n ");
3253 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3254 }
3255
3256 old_stmt = stmt;
3257 push_stmt_changes (gsi_stmt_ptr (&i));
3258
3259 if (!update_call_from_tree (&i, result))
3260 {
3261 gimplify_and_update_call_from_tree (&i, result);
3262 todoflags |= TODO_rebuild_alias;
3263 }
3264
3265 stmt = gsi_stmt (i);
3266 pop_stmt_changes (gsi_stmt_ptr (&i));
3267
3268 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3269 && gimple_purge_dead_eh_edges (bb))
3270 cfg_changed = true;
3271
3272 if (dump_file && (dump_flags & TDF_DETAILS))
3273 {
3274 fprintf (dump_file, "to\n ");
3275 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3276 fprintf (dump_file, "\n");
3277 }
3278
3279 /* Retry the same statement if it changed into another
3280 builtin, there might be new opportunities now. */
3281 if (gimple_code (stmt) != GIMPLE_CALL)
3282 {
3283 gsi_next (&i);
3284 continue;
3285 }
3286 callee = gimple_call_fndecl (stmt);
3287 if (!callee
3288 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3289 || DECL_FUNCTION_CODE (callee) == fcode)
3290 gsi_next (&i);
3291 }
3292 }
3293
3294 /* Delete unreachable blocks. */
3295 if (cfg_changed)
3296 todoflags |= TODO_cleanup_cfg;
3297
3298 return todoflags;
3299}
3300
3301
3302struct gimple_opt_pass pass_fold_builtins =
3303{
3304 {
3305 GIMPLE_PASS,
3306 "fab", /* name */
3307 NULL, /* gate */
3308 execute_fold_all_builtins, /* execute */
3309 NULL, /* sub */
3310 NULL, /* next */
3311 0, /* static_pass_number */
3312 0, /* tv_id */
3313 PROP_cfg | PROP_ssa, /* properties_required */
3314 0, /* properties_provided */
3315 0, /* properties_destroyed */
3316 0, /* todo_flags_start */
3317 TODO_dump_func
3318 | TODO_verify_ssa
3319 | TODO_update_ssa /* todo_flags_finish */
3320 }
3321};