/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED  ->  the initial state of the value.  This value
                        is replaced with a correct initial value
                        the first time the value is used, so the
                        rest of the pass does not need to care about
                        it.  Using this value simplifies initialization
                        of the pass, and prevents us from needlessly
                        scanning statements that are never reached.

     UNDEFINED      ->  V_i is a local variable whose definition
                        has not been processed yet.  Therefore we
                        don't yet know if its value is a constant
                        or not.

     CONSTANT       ->  V_i has been found to hold a constant
                        value C.

     VARYING        ->  V_i cannot take a constant value, or if it
                        does, it is not possible to determine it
                        at compile time.

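   The lattice of these values is ordered UNINITIALIZED < UNDEFINED
   < CONSTANT < VARYING (the declaration order of ccp_lattice_t
   below); set_lattice_value asserts that every lattice transition is
   monotonically increasing in this order.
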
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

            if (PRED)
              a_9 = 3;
            else
              a_10 = 100;
            a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

       struct A a;
       const int B = 42;

       void foo (int i)
       {
         if (i > 10)
           a.a = 42;
         else
           {
             a.b = 21;
             a.a = a.b + 21;
           }

         if (a.a != B)
           never_executed ();
       }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

       # a_5 = VDEF <a_4>
       a.a = 2;

       # VUSE <a_5>
       x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

       int A;

       foo (int i)
       {
         if (i_3 > 10)
           A_4 = 3;
         # A_5 = PHI (A_4, A_2);

         # VUSE <A_5>
         A.0_6 = A;

         return A.0_6;
       }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

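/* For example (an illustrative note, not in the original sources):
   given

     static const int forty_two = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 from the
   DECL_INITIAL of 'forty_two'; a locally-binding 'const' integral or
   float variable without an initializer yields a zero of the
   variable's type.  */
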
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set the value of y to NaN.  This causes the value of x to be
   set to NaN as well.  When we later determine that y is in fact
   VARYING, fold uses the fact that HONOR_NANS is false, and we try to
   change the value of x to 0, causing an ICE.  With HONOR_NANS being
   false, the real appearance of NaN would cause undefined behavior,
   though, so claiming that y (and x) are UNDEFINED initially is
   correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may not be
     UNDEFINED, fall back to VARYING even if there were CONSTANT
     operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
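
/* For instance (an illustrative note, not in the original sources):
   for a statement x_2 = y_1 + 4, likely_value returns CONSTANT when
   y_1 has lattice value CONSTANT, UNDEFINED while y_1 is still
   UNDEFINED (PLUS_EXPR is in the list above), and VARYING
   otherwise.  */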

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call that does not return a value, or whose callee is
     known and is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci       if (i == j)
     Ci  M Cj        = VARYING  if (i != j)  */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         VAL1 already contains the value we want for equal constants.
         If these two values come from memory stores, make sure that
         they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
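
/* For example (an illustrative note, not in the original sources):
   meeting UNDEFINED with CONSTANT 100 yields CONSTANT 100, while
   meeting CONSTANT 3 with CONSTANT 100 yields VARYING.  This is how
   the PHI node in the header comment's example evaluates when both
   incoming edges are executable.  */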

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
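
/* For instance (an illustrative note, not in the original sources):
   if p_1 has the constant value &x, the load y_2 = *p_1 can be
   folded to y_2 = x, cancelling the ADDR_EXPR against the
   dereference, provided the types pass the checks above.  */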

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address
                 operations.  The folding we do here matches that in
                 tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && may_propagate_address_into_dereference
                               (val->value, *base))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }

              if (kind == tcc_reference)
                {
                  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary (VIEW_CONVERT_EXPR,
                                           TREE_TYPE (rhs), val->value);
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);
              tree res;

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                                   (op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              res = fold_unary (subcode, gimple_expr_type (stmt), op0);

              /* If the operation was a conversion do _not_ mark a
                 resulting constant with TREE_OVERFLOW if the original
                 constant was not.  These conversions have implementation
                 defined behavior and retaining the TREE_OVERFLOW flag
                 here would confuse later passes such as VRP.  */
              if (res
                  && TREE_CODE (res) == INTEGER_CST
                  && TREE_CODE (op0) == INTEGER_CST
                  && CONVERT_EXPR_CODE_P (subcode))
                TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

              return res;
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address (op0, op1,
                                                           TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant
               value, if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
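
/* For example (an illustrative note, not in the original sources):
   with

     static const int tbl[3] = { 10, 20, 30 };

   the load x_1 = tbl[2] is an ARRAY_REF into a TREE_STATIC,
   TREE_READONLY constructor, so fold_const_aggregate_ref returns the
   INTEGER_CST 30.  If the index is an SSA_NAME, its CONSTANT lattice
   value is used instead.  */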

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

1393}
1394
750628d8 1395/* Visit the assignment statement STMT. Set the value of its LHS to the
0bca51f0
DN
1396 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1397 creates virtual definitions, set the value of each new name to that
726a989a
RB
1398 of the RHS (if we can derive a constant out of the RHS).
1399 Value-returning call statements also perform an assignment, and
1400 are handled here. */
6de9cd9a 1401
750628d8 1402static enum ssa_prop_result
726a989a 1403visit_assignment (gimple stmt, tree *output_p)
6de9cd9a 1404{
0bca51f0 1405 prop_value_t val;
0bca51f0 1406 enum ssa_prop_result retval;
6de9cd9a 1407
726a989a 1408 tree lhs = gimple_get_lhs (stmt);
6de9cd9a 1409
726a989a
RB
1410 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1411 || gimple_call_lhs (stmt) != NULL_TREE);
1412
1413 if (gimple_assign_copy_p (stmt))
750628d8 1414 {
726a989a 1415 tree rhs = gimple_assign_rhs1 (stmt);
0bca51f0 1416
726a989a
RB
1417 if (TREE_CODE (rhs) == SSA_NAME)
1418 {
1419 /* For a simple copy operation, we copy the lattice values. */
1420 prop_value_t *nval = get_value (rhs);
1421 val = *nval;
1422 }
0bca51f0 1423 else
726a989a 1424 val = evaluate_stmt (stmt);
750628d8
DN
1425 }
1426 else
726a989a
RB
1427 /* Evaluate the statement, which could be
1428 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
87e1e42b 1429 val = evaluate_stmt (stmt);
6de9cd9a 1430
0bca51f0 1431 retval = SSA_PROP_NOT_INTERESTING;
6de9cd9a 1432
750628d8 1433 /* Set the lattice value of the statement's output. */
0bca51f0 1434 if (TREE_CODE (lhs) == SSA_NAME)
6de9cd9a 1435 {
0bca51f0
DN
1436 /* If STMT is an assignment to an SSA_NAME, we only have one
1437 value to set. */
1438 if (set_lattice_value (lhs, val))
1439 {
1440 *output_p = lhs;
1441 if (val.lattice_val == VARYING)
1442 retval = SSA_PROP_VARYING;
1443 else
1444 retval = SSA_PROP_INTERESTING;
1445 }
6de9cd9a 1446 }
0bca51f0
DN
1447
1448 return retval;
6de9cd9a
DN
1449}
1450
6de9cd9a 1451
750628d8
DN
1452/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1453 if it can determine which edge will be taken. Otherwise, return
1454 SSA_PROP_VARYING. */
1455
1456static enum ssa_prop_result
726a989a 1457visit_cond_stmt (gimple stmt, edge *taken_edge_p)
6de9cd9a 1458{
0bca51f0 1459 prop_value_t val;
750628d8
DN
1460 basic_block block;
1461
726a989a 1462 block = gimple_bb (stmt);
750628d8
DN
1463 val = evaluate_stmt (stmt);
1464
1465 /* Find which edge out of the conditional block will be taken and add it
1466 to the worklist. If no single edge can be determined statically,
1467 return SSA_PROP_VARYING to feed all the outgoing edges to the
1468 propagation engine. */
0bca51f0 1469 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
750628d8
DN
1470 if (*taken_edge_p)
1471 return SSA_PROP_INTERESTING;
1472 else
1473 return SSA_PROP_VARYING;
6de9cd9a
DN
1474}
1475
6de9cd9a 1476
750628d8
DN
1477/* Evaluate statement STMT. If the statement produces an output value and
1478 its evaluation changes the lattice value of its output, return
1479 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1480 output value.
1481
1482 If STMT is a conditional branch and we can determine its truth
1483 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1484 value, return SSA_PROP_VARYING. */
6de9cd9a 1485
750628d8 1486static enum ssa_prop_result
726a989a 1487ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
750628d8 1488{
750628d8
DN
1489 tree def;
1490 ssa_op_iter iter;
6de9cd9a 1491
750628d8 1492 if (dump_file && (dump_flags & TDF_DETAILS))
6de9cd9a 1493 {
0bca51f0 1494 fprintf (dump_file, "\nVisiting statement:\n");
726a989a 1495 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
6de9cd9a 1496 }
6de9cd9a 1497
726a989a 1498 switch (gimple_code (stmt))
6de9cd9a 1499 {
726a989a
RB
1500 case GIMPLE_ASSIGN:
1501 /* If the statement is an assignment that produces a single
1502 output value, evaluate its RHS to see if the lattice value of
1503 its output has changed. */
1504 return visit_assignment (stmt, output_p);
1505
1506 case GIMPLE_CALL:
1507 /* A value-returning call also performs an assignment. */
1508 if (gimple_call_lhs (stmt) != NULL_TREE)
1509 return visit_assignment (stmt, output_p);
1510 break;
1511
1512 case GIMPLE_COND:
1513 case GIMPLE_SWITCH:
1514 /* If STMT is a conditional branch, see if we can determine
1515 which branch will be taken. */
1516 /* FIXME. It appears that we should be able to optimize
1517 computed GOTOs here as well. */
1518 return visit_cond_stmt (stmt, taken_edge_p);
1519
1520 default:
1521 break;
6de9cd9a 1522 }
6de9cd9a 1523
750628d8
DN
1524 /* Any other kind of statement is not interesting for constant
1525 propagation and, therefore, not worth simulating. */
750628d8
DN
1526 if (dump_file && (dump_flags & TDF_DETAILS))
1527 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
6de9cd9a 1528
750628d8
DN
1529 /* Definitions made by statements other than assignments to
1530 SSA_NAMEs represent unknown modifications to their outputs.
1531 Mark them VARYING. */
0bca51f0
DN
1532 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1533 {
dce2b2f6 1534 prop_value_t v = { VARYING, NULL_TREE };
0bca51f0
DN
1535 set_lattice_value (def, v);
1536 }
6de9cd9a 1537
750628d8
DN
1538 return SSA_PROP_VARYING;
1539}
6de9cd9a 1540
6de9cd9a 1541
0bca51f0 1542/* Main entry point for SSA Conditional Constant Propagation. */
750628d8 1543
3253eafb 1544static unsigned int
dce2b2f6 1545do_ssa_ccp (void)
750628d8
DN
1546{
1547 ccp_initialize ();
1548 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
3253eafb 1549 if (ccp_finalize ())
706ca88e 1550 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
3253eafb
JH
1551 else
1552 return 0;
6de9cd9a
DN
1553}
1554
173b818d
BB
1555
1556static bool
750628d8 1557gate_ccp (void)
173b818d 1558{
750628d8 1559 return flag_tree_ccp != 0;
173b818d
BB
1560}
1561
6de9cd9a 1562
8ddbbcae 1563struct gimple_opt_pass pass_ccp =
750628d8 1564{
8ddbbcae
JH
1565 {
1566 GIMPLE_PASS,
750628d8
DN
1567 "ccp", /* name */
1568 gate_ccp, /* gate */
0bca51f0 1569 do_ssa_ccp, /* execute */
750628d8
DN
1570 NULL, /* sub */
1571 NULL, /* next */
1572 0, /* static_pass_number */
1573 TV_TREE_CCP, /* tv_id */
7faade0f 1574 PROP_cfg | PROP_ssa, /* properties_required */
750628d8 1575 0, /* properties_provided */
ae07b463 1576 0, /* properties_destroyed */
750628d8 1577 0, /* todo_flags_start */
3253eafb 1578 TODO_dump_func | TODO_verify_ssa
8ddbbcae
JH
1579 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1580 }
750628d8 1581};
6de9cd9a 1582
6de9cd9a 1583
6de9cd9a
DN
1584/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1585 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
9cf737f8 1586 is the desired result type. */
6de9cd9a
DN
1587
1588static tree
1fd89060
RG
1589maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1590 bool allow_negative_idx)
6de9cd9a 1591{
891fc5e9 1592 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
44de5aeb 1593 tree array_type, elt_type, elt_size;
3097760b 1594 tree domain_type;
44de5aeb
RK
1595
1596 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1597 measured in units of the size of elements type) from that ARRAY_REF).
1598 We can't do anything if either is variable.
1599
1600 The case we handle here is *(&A[N]+O). */
1601 if (TREE_CODE (base) == ARRAY_REF)
1602 {
1603 tree low_bound = array_ref_low_bound (base);
1604
1605 elt_offset = TREE_OPERAND (base, 1);
1606 if (TREE_CODE (low_bound) != INTEGER_CST
1607 || TREE_CODE (elt_offset) != INTEGER_CST)
1608 return NULL_TREE;
1609
1610 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1611 base = TREE_OPERAND (base, 0);
1612 }
6de9cd9a
DN
1613
1614 /* Ignore stupid user tricks of indexing non-array variables. */
1615 array_type = TREE_TYPE (base);
1616 if (TREE_CODE (array_type) != ARRAY_TYPE)
1617 return NULL_TREE;
1618 elt_type = TREE_TYPE (array_type);
f4088621 1619 if (!useless_type_conversion_p (orig_type, elt_type))
6de9cd9a 1620 return NULL_TREE;
891fc5e9
RG
1621
1622 /* Use signed size type for intermediate computation on the index. */
1623 idx_type = signed_type_for (size_type_node);
1624
44de5aeb
RK
1625 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1626 element type (so we can use the alignment if it's not constant).
1627 Otherwise, compute the offset as an index by using a division. If the
1628 division isn't exact, then don't do anything. */
6de9cd9a 1629 elt_size = TYPE_SIZE_UNIT (elt_type);
fe9821b8
JH
1630 if (!elt_size)
1631 return NULL;
44de5aeb
RK
1632 if (integer_zerop (offset))
1633 {
1634 if (TREE_CODE (elt_size) != INTEGER_CST)
1635 elt_size = size_int (TYPE_ALIGN (elt_type));
6de9cd9a 1636
891fc5e9 1637 idx = build_int_cst (idx_type, 0);
44de5aeb
RK
1638 }
1639 else
1640 {
1641 unsigned HOST_WIDE_INT lquo, lrem;
1642 HOST_WIDE_INT hquo, hrem;
891fc5e9 1643 double_int soffset;
44de5aeb 1644
891fc5e9
RG
1645 /* The final array offset should be signed, so we need
1646 to sign-extend the (possibly pointer) offset here
1647 and use signed division. */
1648 soffset = double_int_sext (tree_to_double_int (offset),
1649 TYPE_PRECISION (TREE_TYPE (offset)));
44de5aeb 1650 if (TREE_CODE (elt_size) != INTEGER_CST
891fc5e9
RG
1651 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1652 soffset.low, soffset.high,
44de5aeb
RK
1653 TREE_INT_CST_LOW (elt_size),
1654 TREE_INT_CST_HIGH (elt_size),
1655 &lquo, &hquo, &lrem, &hrem)
1656 || lrem || hrem)
1657 return NULL_TREE;
6de9cd9a 1658
891fc5e9 1659 idx = build_int_cst_wide (idx_type, lquo, hquo);
44de5aeb
RK
1660 }
1661
1662 /* Assume the low bound is zero. If there is a domain type, get the
1663 low bound, if any, convert the index into that type, and add the
1664 low bound. */
891fc5e9 1665 min_idx = build_int_cst (idx_type, 0);
3097760b
JH
1666 domain_type = TYPE_DOMAIN (array_type);
1667 if (domain_type)
6de9cd9a 1668 {
3097760b 1669 idx_type = domain_type;
891fc5e9
RG
1670 if (TYPE_MIN_VALUE (idx_type))
1671 min_idx = TYPE_MIN_VALUE (idx_type);
44de5aeb 1672 else
891fc5e9 1673 min_idx = fold_convert (idx_type, min_idx);
44de5aeb
RK
1674
1675 if (TREE_CODE (min_idx) != INTEGER_CST)
1676 return NULL_TREE;
1677
891fc5e9 1678 elt_offset = fold_convert (idx_type, elt_offset);
6de9cd9a
DN
1679 }
1680
44de5aeb
RK
1681 if (!integer_zerop (min_idx))
1682 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1683 if (!integer_zerop (elt_offset))
1684 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1685
891fc5e9
RG
1686 /* Make sure to possibly truncate late after offsetting. */
1687 idx = fold_convert (idx_type, idx);
1688
3097760b 1689 /* We don't want to construct access past array bounds. For example
1fd89060
RG
1690 char *(c[4]);
1691 c[3][2];
1692 should not be simplified into (*c)[14] or tree-vrp will
1693 give false warnings. The same is true for
1694 struct A { long x; char d[0]; } *a;
1695 (char *)a - 4;
1696 which should not be folded to &a->d[-8]. */
1697 if (domain_type
1698 && TYPE_MAX_VALUE (domain_type)
1699 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1700 {
1701 tree up_bound = TYPE_MAX_VALUE (domain_type);
1702
1703 if (tree_int_cst_lt (up_bound, idx)
1704 /* Accesses after the end of arrays of size 0 (gcc
1705 extension) and 1 are likely intentional ("struct
1706 hack"). */
1707 && compare_tree_int (up_bound, 1) > 0)
1708 return NULL_TREE;
1709 }
1710 if (domain_type
1711 && TYPE_MIN_VALUE (domain_type))
1712 {
1713 if (!allow_negative_idx
1714 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1715 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1716 return NULL_TREE;
1717 }
1718 else if (!allow_negative_idx
1719 && compare_tree_int (idx, 0) < 0)
1720 return NULL_TREE;
1721
1722 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1723 }
1724
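/* A minimal sketch, for illustration only (example values assumed,
   4-byte int):

     int a[10];
     ... *(&a[2] + 8) ...

   The byte offset 8 divides exactly by the element size, yielding an
   element offset of 2; added to the original index 2, the whole access
   folds to a[4].  */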
1725
1726 /* Attempt to fold *(S+O) to S.X.
1727 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1728 is the desired result type. */
1729
1730 static tree
1731 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1732 tree orig_type, bool base_is_ptr)
1733 {
1734 tree f, t, field_type, tail_array_field, field_offset;
1735 tree ret;
1736 tree new_base;
1737
1738 if (TREE_CODE (record_type) != RECORD_TYPE
1739 && TREE_CODE (record_type) != UNION_TYPE
1740 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1741 return NULL_TREE;
1742
1743 /* Short-circuit silly cases. */
1744 if (useless_type_conversion_p (record_type, orig_type))
1745 return NULL_TREE;
1746
1747 tail_array_field = NULL_TREE;
1748 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1749 {
1750 int cmp;
1751
1752 if (TREE_CODE (f) != FIELD_DECL)
1753 continue;
1754 if (DECL_BIT_FIELD (f))
1755 continue;
1756
1757 if (!DECL_FIELD_OFFSET (f))
1758 continue;
1759 field_offset = byte_position (f);
1760 if (TREE_CODE (field_offset) != INTEGER_CST)
1761 continue;
1762
1763 /* ??? Java creates "interesting" fields for representing base classes.
1764 They have no name, and have no context. With no context, we get into
1765 trouble with nonoverlapping_component_refs_p. Skip them. */
1766 if (!DECL_FIELD_CONTEXT (f))
1767 continue;
1768
1769 /* The previous array field isn't at the end. */
1770 tail_array_field = NULL_TREE;
1771
1772 /* Check to see if this offset overlaps with the field. */
1773 cmp = tree_int_cst_compare (field_offset, offset);
1774 if (cmp > 0)
1775 continue;
1776
1777 field_type = TREE_TYPE (f);
1778
1779 /* Here we exactly match the offset being checked. If the types match,
1780 then we can return that field. */
1781 if (cmp == 0
1782 && useless_type_conversion_p (orig_type, field_type))
1783 {
1784 if (base_is_ptr)
1785 base = build1 (INDIRECT_REF, record_type, base);
1786 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1787 return t;
1788 }
1789
1790 /* Don't care about offsets into the middle of scalars. */
1791 if (!AGGREGATE_TYPE_P (field_type))
1792 continue;
1793
1794 /* Check for array at the end of the struct. This is often
1795 used for flexible array members. We should be able to
1796 turn this into an array access anyway. */
1797 if (TREE_CODE (field_type) == ARRAY_TYPE)
1798 tail_array_field = f;
1799
1800 /* Check the end of the field against the offset. */
1801 if (!DECL_SIZE_UNIT (f)
1802 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1803 continue;
1804 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1805 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1806 continue;
1807
1808 /* If we matched, then set offset to the displacement into
1809 this field. */
1810 if (base_is_ptr)
1811 new_base = build1 (INDIRECT_REF, record_type, base);
1812 else
1813 new_base = base;
1814 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1815
1816 /* Recurse to possibly find the match. */
1817 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1818 f == TYPE_FIELDS (record_type));
1819 if (ret)
1820 return ret;
1821 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1822 orig_type, false);
1823 if (ret)
1824 return ret;
1825 }
1826
1827 if (!tail_array_field)
1828 return NULL_TREE;
1829
1830 f = tail_array_field;
1831 field_type = TREE_TYPE (f);
1832 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1833
1834 /* If we get here, we've got an aggregate field, and a possibly
1835 nonzero offset into it. Recurse and hope for a valid match. */
1836 if (base_is_ptr)
1837 base = build1 (INDIRECT_REF, record_type, base);
1838 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1839
1840 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1841 f == TYPE_FIELDS (record_type));
1842 if (t)
1843 return t;
1844 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1845 orig_type, false);
1846 }
1847
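/* A minimal sketch, for illustration only (example values assumed,
   4-byte int, no padding):

     struct S { int a; int b; } s;
     ... *(int *)((char *)&s + 4) ...

   Byte offset 4 lands exactly on field B with a matching type, so the
   access folds to the COMPONENT_REF s.b.  */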
1848 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1849 or BASE[index] or by combination of those.
1850
1851 Before attempting the conversion strip off existing ADDR_EXPRs and
1852 handled component refs. */
1853
1854 tree
1855 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1856 {
1857 tree ret;
1858 tree type;
1859 bool base_is_ptr = true;
1860
1861 STRIP_NOPS (base);
1862 if (TREE_CODE (base) == ADDR_EXPR)
1863 {
1864 base_is_ptr = false;
1865
1866 base = TREE_OPERAND (base, 0);
1867
1868 /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
1869 field of a union, so it needs to be removed and a new COMPONENT_REF
1870 constructed. Such wrong COMPONENT_REFs are often constructed by folding
1871 the (type *)&object within the expression (type *)&object+offset. */
1872 if (handled_component_p (base))
1873 {
1874 HOST_WIDE_INT sub_offset, size, maxsize;
1875 tree newbase;
1876 newbase = get_ref_base_and_extent (base, &sub_offset,
1877 &size, &maxsize);
1878 gcc_assert (newbase);
1879 if (size == maxsize
1880 && size != -1
1881 && !(sub_offset & (BITS_PER_UNIT - 1)))
1882 {
1883 base = newbase;
1884 if (sub_offset)
1885 offset = int_const_binop (PLUS_EXPR, offset,
1886 build_int_cst (TREE_TYPE (offset),
1887 sub_offset / BITS_PER_UNIT), 1);
1888 }
1889 }
1890 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1891 && integer_zerop (offset))
1892 return base;
1893 type = TREE_TYPE (base);
1894 }
1895 else
1896 {
1897 base_is_ptr = true;
1898 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1899 return NULL_TREE;
1900 type = TREE_TYPE (TREE_TYPE (base));
1901 }
1902 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1903 orig_type, base_is_ptr);
1904 if (!ret)
1905 {
1906 if (base_is_ptr)
1907 base = build1 (INDIRECT_REF, type, base);
1908 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1909 }
1910 return ret;
1911 }
1912
1913 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1914 or &BASE[index] or by combination of those.
1915
1916 Before attempting the conversion strip off existing component refs. */
1917
1918 tree
1919 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1920 {
1921 tree t;
1922
1923 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1924 && POINTER_TYPE_P (orig_type));
1925
1926 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1927 if (t != NULL_TREE)
1928 {
1929 tree orig = addr;
1930 tree ptr_type;
1931
1932 /* For __builtin_object_size to function correctly we need to
1933 make sure not to fold address arithmetic so that we change
1934 reference from one array to another. This would happen for
1935 example for
1936
1937 struct X { char s1[10]; char s2[10] } s;
1938 char *foo (void) { return &s.s2[-4]; }
1939
1940 where we need to avoid generating &s.s1[6]. As the C and
1941 C++ frontends create different initial trees
1942 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1943 sophisticated comparisons here. Note that checking for the
1944 condition after the fact is easier than trying to avoid doing
1945 the folding. */
1946 STRIP_NOPS (orig);
1947 if (TREE_CODE (orig) == ADDR_EXPR)
1948 orig = TREE_OPERAND (orig, 0);
1949 if ((TREE_CODE (orig) == ARRAY_REF
1950 || (TREE_CODE (orig) == COMPONENT_REF
1951 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1952 && (TREE_CODE (t) == ARRAY_REF
1953 || (TREE_CODE (t) == COMPONENT_REF
1954 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
1955 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1956 ? TREE_OPERAND (orig, 0) : orig,
1957 TREE_CODE (t) == ARRAY_REF
1958 ? TREE_OPERAND (t, 0) : t, 0))
1959 return NULL_TREE;
1960
1961 ptr_type = build_pointer_type (TREE_TYPE (t));
1962 if (!useless_type_conversion_p (orig_type, ptr_type))
1963 return NULL_TREE;
1964 return build_fold_addr_expr_with_type (t, ptr_type);
1965 }
1966
1967 return NULL_TREE;
1968 }
1969
1970 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1971 Return the simplified expression, or NULL if nothing could be done. */
1972
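/* A minimal sketch, for illustration only: a read such as *("abc" + 1)
   folds to the character constant 'b' through
   fold_read_from_constant_string, and *(&b + O) becomes a B.X
   component reference when the constant offset O matches a field, as
   handled below.  */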
1973 static tree
1974 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1975 {
1976 tree t;
1977 bool volatile_p = TREE_THIS_VOLATILE (expr);
1978
1979 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1980 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1981 are sometimes added. */
1982 base = fold (base);
1983 STRIP_TYPE_NOPS (base);
1984 TREE_OPERAND (expr, 0) = base;
1985
1986 /* One possibility is that the address reduces to a string constant. */
1987 t = fold_read_from_constant_string (expr);
1988 if (t)
1989 return t;
1990
1991 /* Add in any offset from a POINTER_PLUS_EXPR. */
1992 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1993 {
1994 tree offset2;
1995
1996 offset2 = TREE_OPERAND (base, 1);
1997 if (TREE_CODE (offset2) != INTEGER_CST)
1998 return NULL_TREE;
1999 base = TREE_OPERAND (base, 0);
2000
2001 offset = fold_convert (sizetype,
2002 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2003 }
2004
2005 if (TREE_CODE (base) == ADDR_EXPR)
2006 {
2007 tree base_addr = base;
2008
2009 /* Strip the ADDR_EXPR. */
2010 base = TREE_OPERAND (base, 0);
2011
2012 /* Fold away CONST_DECL to its value, if the type is scalar. */
2013 if (TREE_CODE (base) == CONST_DECL
2014 && is_gimple_min_invariant (DECL_INITIAL (base)))
2015 return DECL_INITIAL (base);
2016
2017 /* Try folding *(&B+O) to B.X. */
2018 t = maybe_fold_offset_to_reference (base_addr, offset,
2019 TREE_TYPE (expr));
2020 if (t)
2021 {
2022 /* Preserve volatileness of the original expression.
2023 We can end up with a plain decl here which is shared
2024 and we shouldn't mess with its flags. */
2025 if (!SSA_VAR_P (t))
2026 TREE_THIS_VOLATILE (t) = volatile_p;
2027 return t;
2028 }
2029 }
2030 else
2031 {
2032 /* We can get here for out-of-range string constant accesses,
2033 such as "_"[3]. Bail out of the entire substitution search
2034 and arrange for the entire statement to be replaced by a
2035 call to __builtin_trap. In all likelihood this will all be
2036 constant-folded away, but in the meantime we can't leave with
2037 something that get_expr_operands can't understand. */
2038
2039 t = base;
2040 STRIP_NOPS (t);
2041 if (TREE_CODE (t) == ADDR_EXPR
2042 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2043 {
2044 /* FIXME: Except that this causes problems elsewhere with dead
2045 code not being deleted, and we die in the rtl expanders
2046 because we failed to remove some ssa_name. In the meantime,
2047 just return zero. */
2048 /* FIXME2: This condition should be signaled by
2049 fold_read_from_constant_string directly, rather than
2050 re-checking for it here. */
2051 return integer_zero_node;
2052 }
2053
2054 /* Try folding *(B+O) to B->X. Still an improvement. */
2055 if (POINTER_TYPE_P (TREE_TYPE (base)))
2056 {
2057 t = maybe_fold_offset_to_reference (base, offset,
2058 TREE_TYPE (expr));
2059 if (t)
2060 return t;
2061 }
2062 }
2063
2064 /* Otherwise we had an offset that we could not simplify. */
2065 return NULL_TREE;
2066 }
2067
2068
2069 /* A quaint feature extant in our address arithmetic is that there
2070 can be hidden type changes here. The type of the result need
2071 not be the same as the type of the input pointer.
2072
2073 What we're after here is an expression of the form
2074 (T *)(&array + const)
2075 where array is OP0, const is OP1, RES_TYPE is T and
2076 the cast doesn't actually exist, but is implicit in the
2077 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2078 &array[x]
2079 which may be able to propagate further. */
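/* A minimal sketch, for illustration only (4-byte int assumed): for

     int a[10];

   the sum &a p+ 12 with result type int * is rewritten as &a[3],
   which later substitutions may be able to fold further.  */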
2080
2081 tree
2082 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2083 {
2084 tree ptd_type;
2085 tree t;
2086
2087 /* It had better be a constant. */
2088 if (TREE_CODE (op1) != INTEGER_CST)
2089 return NULL_TREE;
2090 /* The first operand should be an ADDR_EXPR. */
2091 if (TREE_CODE (op0) != ADDR_EXPR)
2092 return NULL_TREE;
2093 op0 = TREE_OPERAND (op0, 0);
2094
2095 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2096 the offset into it. */
2097 while (TREE_CODE (op0) == ARRAY_REF)
2098 {
2099 tree array_obj = TREE_OPERAND (op0, 0);
2100 tree array_idx = TREE_OPERAND (op0, 1);
2101 tree elt_type = TREE_TYPE (op0);
2102 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2103 tree min_idx;
2104
2105 if (TREE_CODE (array_idx) != INTEGER_CST)
2106 break;
2107 if (TREE_CODE (elt_size) != INTEGER_CST)
2108 break;
2109
2110 /* Un-bias the index by the min index of the array type. */
2111 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2112 if (min_idx)
2113 {
2114 min_idx = TYPE_MIN_VALUE (min_idx);
2115 if (min_idx)
2116 {
2117 if (TREE_CODE (min_idx) != INTEGER_CST)
2118 break;
2119
2120 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2121 if (!integer_zerop (min_idx))
2122 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2123 min_idx, 0);
2124 }
2125 }
2126
2127 /* Convert the index to a byte offset. */
2128 array_idx = fold_convert (sizetype, array_idx);
2129 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2130
2131 /* Update the operands for the next round, or for folding. */
2132 op1 = int_const_binop (PLUS_EXPR,
2133 array_idx, op1, 0);
2134 op0 = array_obj;
2135 }
2136
2137 ptd_type = TREE_TYPE (res_type);
2138 /* If we want a pointer to void, reconstruct the reference from the
2139 array element type. A pointer to that can be trivially converted
2140 to void *. This happens as we fold (void *)(ptr p+ off). */
2141 if (VOID_TYPE_P (ptd_type)
2142 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2143 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2144
2145 /* At which point we can try some of the same things as for indirects. */
2146 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2147 if (!t)
2148 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2149 ptd_type, false);
2150 if (t)
2151 t = build1 (ADDR_EXPR, res_type, t);
2152
2153 return t;
2154 }
2155
2156 /* For passing state through walk_tree into fold_stmt_r and its
2157 children. */
2158
2159 struct fold_stmt_r_data
2160 {
2161 gimple stmt;
2162 bool *changed_p;
2163 bool *inside_addr_expr_p;
2164 };
2165
2166 /* Subroutine of fold_stmt called via walk_tree. We perform several
2167 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2168
2169 static tree
2170 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2171 {
2172 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2173 struct fold_stmt_r_data *fold_stmt_r_data;
2174 bool *inside_addr_expr_p;
2175 bool *changed_p;
6de9cd9a 2176 tree expr = *expr_p, t;
2177 bool volatile_p = TREE_THIS_VOLATILE (expr);
2178
2179 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2180 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2181 changed_p = fold_stmt_r_data->changed_p;
2182
2183 /* ??? It'd be nice if walk_tree had a pre-order option. */
2184 switch (TREE_CODE (expr))
2185 {
2186 case INDIRECT_REF:
2187 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2188 if (t)
2189 return t;
2190 *walk_subtrees = 0;
2191
2192 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2193 integer_zero_node);
2194 if (!t
2195 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2196 /* If we had a good reason for propagating the address here,
2197 make sure we end up with valid gimple. See PR34989. */
2198 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2199 break;
2200
2201 case NOP_EXPR:
2202 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2203 if (t)
2204 return t;
2205 *walk_subtrees = 0;
2206
2207 if (POINTER_TYPE_P (TREE_TYPE (expr))
2208 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2209 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2210 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2211 integer_zero_node,
2212 TREE_TYPE (TREE_TYPE (expr)))))
2213 return t;
2214 break;
2215
2216 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2217 We'd only want to bother decomposing an existing ARRAY_REF if
2218 the base array is found to have another offset contained within.
2219 Otherwise we'd be wasting time. */
2220 case ARRAY_REF:
2221 /* If we are not processing expressions found within an
2222 ADDR_EXPR, then we can fold constant array references. */
2223 if (!*inside_addr_expr_p)
2224 t = fold_read_from_constant_string (expr);
2225 else
2226 t = NULL;
2227 break;
2228
2229 case ADDR_EXPR:
2230 *inside_addr_expr_p = true;
2231 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2232 *inside_addr_expr_p = false;
2233 if (t)
2234 return t;
2235 *walk_subtrees = 0;
2236
2237 /* Make sure the value is properly considered constant, and so gets
2238 propagated as expected. */
2239 if (*changed_p)
2240 recompute_tree_invariant_for_addr_expr (expr);
2241 return NULL_TREE;
2242
2243 case COMPONENT_REF:
2244 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2245 if (t)
2246 return t;
2247 *walk_subtrees = 0;
2248
2249 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2250 We've already checked that the records are compatible, so we should
2251 come up with a set of compatible fields. */
2252 {
2253 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2254 tree expr_field = TREE_OPERAND (expr, 1);
2255
2256 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2257 {
2258 expr_field = find_compatible_field (expr_record, expr_field);
2259 TREE_OPERAND (expr, 1) = expr_field;
2260 }
2261 }
2262 break;
2263
2264 case TARGET_MEM_REF:
2265 t = maybe_fold_tmr (expr);
2266 break;
2267
2268 case POINTER_PLUS_EXPR:
2269 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2270 if (t)
2271 return t;
2272 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2273 if (t)
2274 return t;
2275 *walk_subtrees = 0;
2276
2277 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2278 TREE_OPERAND (expr, 0),
2279 TREE_OPERAND (expr, 1));
2280 break;
2281
2282 case COND_EXPR:
2283 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2284 {
2285 tree op0 = TREE_OPERAND (expr, 0);
2286 tree tem;
2287 bool set;
2288
2289 fold_defer_overflow_warnings ();
2290 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2291 TREE_OPERAND (op0, 0),
2292 TREE_OPERAND (op0, 1));
2293 /* This is actually a conditional expression, not a GIMPLE
2294 conditional statement, however, the valid_gimple_rhs_p
2295 test still applies. */
2296 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2297 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2298 if (set)
2299 {
2300 COND_EXPR_COND (expr) = tem;
2301 t = expr;
2302 break;
2303 }
2304 }
2305 return NULL_TREE;
2306
2307 default:
2308 return NULL_TREE;
2309 }
2310
2311 if (t)
2312 {
2313 /* Preserve volatileness of the original expression.
2314 We can end up with a plain decl here which is shared
2315 and we shouldn't mess with its flags. */
2316 if (!SSA_VAR_P (t))
2317 TREE_THIS_VOLATILE (t) = volatile_p;
2318 *expr_p = t;
2319 *changed_p = true;
2320 }
2321
2322 return NULL_TREE;
2323 }
2324
2325 /* Return the string length, maximum string length or maximum value of
2326 ARG in LENGTH.
2327 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2328 is not NULL and, for TYPE == 0, its value is not equal to the length
2329 we determine or if we are unable to determine the length or value,
2330 return false. VISITED is a bitmap of visited variables.
2331 TYPE is 0 if string length should be returned, 1 for maximum string
2332 length and 2 for maximum value ARG can have. */
2333
2334 static bool
2335 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2336 {
2337 tree var, val;
2338 gimple def_stmt;
2339
2340 if (TREE_CODE (arg) != SSA_NAME)
2341 {
2342 if (TREE_CODE (arg) == COND_EXPR)
2343 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2344 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2345 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2346 else if (TREE_CODE (arg) == ADDR_EXPR
2347 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2348 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2349 {
2350 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2351 if (TREE_CODE (aop0) == INDIRECT_REF
2352 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2353 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2354 length, visited, type);
2355 }
2356
2357 if (type == 2)
2358 {
2359 val = arg;
2360 if (TREE_CODE (val) != INTEGER_CST
2361 || tree_int_cst_sgn (val) < 0)
2362 return false;
2363 }
2364 else
2365 val = c_strlen (arg, 1);
2366 if (!val)
2367 return false;
2368
2369 if (*length)
2370 {
2371 if (type > 0)
2372 {
2373 if (TREE_CODE (*length) != INTEGER_CST
2374 || TREE_CODE (val) != INTEGER_CST)
2375 return false;
2376
2377 if (tree_int_cst_lt (*length, val))
2378 *length = val;
2379 return true;
2380 }
2381 else if (simple_cst_equal (val, *length) != 1)
2382 return false;
2383 }
2384
2385 *length = val;
2386 return true;
2387 }
2388
2389 /* If we were already here, break the infinite cycle. */
2390 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2391 return true;
2392 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2393
2394 var = arg;
2395 def_stmt = SSA_NAME_DEF_STMT (var);
2396
2397 switch (gimple_code (def_stmt))
2398 {
2399 case GIMPLE_ASSIGN:
2400 /* The RHS of the statement defining VAR must either have a
2401 constant length or come from another SSA_NAME with a constant
2402 length. */
2403 if (gimple_assign_single_p (def_stmt)
2404 || gimple_assign_unary_nop_p (def_stmt))
2405 {
2406 tree rhs = gimple_assign_rhs1 (def_stmt);
2407 return get_maxval_strlen (rhs, length, visited, type);
2408 }
2409 return false;
2410
2411 case GIMPLE_PHI:
2412 {
2413 /* All the arguments of the PHI node must have the same constant
2414 length. */
2415 unsigned i;
2416
2417 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2418 {
2419 tree arg = gimple_phi_arg (def_stmt, i)->def;
2420
2421 /* If this PHI has itself as an argument, we cannot
2422 determine the string length of this argument. However,
2423 if we can find a constant string length for the other
2424 PHI args then we can still be sure that this is a
2425 constant string length. So be optimistic and just
2426 continue with the next argument. */
2427 if (arg == gimple_phi_result (def_stmt))
2428 continue;
2429
2430 if (!get_maxval_strlen (arg, length, visited, type))
2431 return false;
2432 }
2433 }
2434 return true;
2435
2436 default:
2437 return false;
2438 }
2439 }
2440
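/* A minimal sketch, for illustration only: for the GIMPLE

     s_1 = "foo";            s_2 = "quux";
     # s_3 = PHI <s_1, s_2>

   a query on s_3 with TYPE == 1 visits both PHI arguments and yields
   4, the maximum of the two constant string lengths.  */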
2441
2442 /* Fold builtin call in statement STMT. Returns a simplified tree.
2443 We may return a non-constant expression, including another call
2444 to a different function and with different arguments, e.g.,
2445 substituting memcpy for strcpy when the string length is known.
2446 Note that some builtins expand into inline code that may not
2447 be valid in GIMPLE. Callers must take care. */
2448
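/* A minimal sketch, for illustration only: when the dataflow proves
   that the source of

     strcpy (dst, src);

   has length 3, fold_builtin_strcpy can substitute

     memcpy (dst, src, 4);

   copying the terminating NUL as well.  */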
2449 static tree
2450 ccp_fold_builtin (gimple stmt)
2451 {
2452 tree result, val[3];
2453 tree callee, a;
2454 int arg_idx, type;
2455 bitmap visited;
2456 bool ignore;
2457 int nargs;
2458
2459 gcc_assert (is_gimple_call (stmt));
2460
2461 ignore = (gimple_call_lhs (stmt) == NULL);
2462
2463 /* First try the generic builtin folder. If that succeeds, return the
2464 result directly. */
2465 result = fold_call_stmt (stmt, ignore);
2466 if (result)
2467 {
2468 if (ignore)
2469 STRIP_NOPS (result);
2470 return result;
2471 }
2472
2473 /* Ignore MD builtins. */
2474 callee = gimple_call_fndecl (stmt);
2475 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2476 return NULL_TREE;
2477
2478 /* If the builtin could not be folded, and it has no argument list,
2479 we're done. */
2480 nargs = gimple_call_num_args (stmt);
2481 if (nargs == 0)
2482 return NULL_TREE;
2483
2484 /* Limit the work only for builtins we know how to simplify. */
2485 switch (DECL_FUNCTION_CODE (callee))
2486 {
2487 case BUILT_IN_STRLEN:
2488 case BUILT_IN_FPUTS:
2489 case BUILT_IN_FPUTS_UNLOCKED:
2490 arg_idx = 0;
2491 type = 0;
2492 break;
2493 case BUILT_IN_STRCPY:
2494 case BUILT_IN_STRNCPY:
2495 arg_idx = 1;
2496 type = 0;
2497 break;
2498 case BUILT_IN_MEMCPY_CHK:
2499 case BUILT_IN_MEMPCPY_CHK:
2500 case BUILT_IN_MEMMOVE_CHK:
2501 case BUILT_IN_MEMSET_CHK:
2502 case BUILT_IN_STRNCPY_CHK:
2503 arg_idx = 2;
2504 type = 2;
2505 break;
2506 case BUILT_IN_STRCPY_CHK:
2507 case BUILT_IN_STPCPY_CHK:
2508 arg_idx = 1;
2509 type = 1;
2510 break;
2511 case BUILT_IN_SNPRINTF_CHK:
2512 case BUILT_IN_VSNPRINTF_CHK:
2513 arg_idx = 1;
2514 type = 2;
2515 break;
2516 default:
2517 return NULL_TREE;
2518 }
2519
2520 /* Try to use the dataflow information gathered by the CCP process. */
2521 visited = BITMAP_ALLOC (NULL);
2522 bitmap_clear (visited);
2523
2524 memset (val, 0, sizeof (val));
2525 a = gimple_call_arg (stmt, arg_idx);
2526 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2527 val[arg_idx] = NULL_TREE;
2528
2529 BITMAP_FREE (visited);
2530
2531 result = NULL_TREE;
2532 switch (DECL_FUNCTION_CODE (callee))
2533 {
2534 case BUILT_IN_STRLEN:
2535 if (val[0])
2536 {
2537 tree new_val =
2538 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2539
2540 /* If the result is not a valid gimple value, or not a cast
2541 of a valid gimple value, then we cannot use the result. */
2542 if (is_gimple_val (new_val)
2543 || (is_gimple_cast (new_val)
2544 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2545 return new_val;
2546 }
2547 break;
2548
6de9cd9a 2549 case BUILT_IN_STRCPY:
5039610b
SL
2550 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2551 result = fold_builtin_strcpy (callee,
726a989a
RB
2552 gimple_call_arg (stmt, 0),
2553 gimple_call_arg (stmt, 1),
5039610b 2554 val[1]);
a32e70c3
RS
2555 break;
2556
6de9cd9a 2557 case BUILT_IN_STRNCPY:
5039610b
SL
2558 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2559 result = fold_builtin_strncpy (callee,
726a989a
RB
2560 gimple_call_arg (stmt, 0),
2561 gimple_call_arg (stmt, 1),
2562 gimple_call_arg (stmt, 2),
5039610b 2563 val[1]);
a32e70c3
RS
2564 break;
2565
6de9cd9a 2566 case BUILT_IN_FPUTS:
726a989a
RB
2567 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2568 gimple_call_arg (stmt, 1),
2569 ignore, false, val[0]);
a32e70c3
RS
2570 break;
2571
6de9cd9a 2572 case BUILT_IN_FPUTS_UNLOCKED:
726a989a
RB
2573 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2574 gimple_call_arg (stmt, 1),
2575 ignore, true, val[0]);
10a0d495
JJ
2576 break;
2577
2578 case BUILT_IN_MEMCPY_CHK:
2579 case BUILT_IN_MEMPCPY_CHK:
2580 case BUILT_IN_MEMMOVE_CHK:
2581 case BUILT_IN_MEMSET_CHK:
2582 if (val[2] && is_gimple_val (val[2]))
2583 result = fold_builtin_memory_chk (callee,
2584 gimple_call_arg (stmt, 0),
2585 gimple_call_arg (stmt, 1),
2586 gimple_call_arg (stmt, 2),
2587 gimple_call_arg (stmt, 3),
2588 val[2], ignore,
2589 DECL_FUNCTION_CODE (callee));
2590 break;
2591
2592 case BUILT_IN_STRCPY_CHK:
2593 case BUILT_IN_STPCPY_CHK:
2594 if (val[1] && is_gimple_val (val[1]))
2595 result = fold_builtin_stxcpy_chk (callee,
2596 gimple_call_arg (stmt, 0),
2597 gimple_call_arg (stmt, 1),
2598 gimple_call_arg (stmt, 2),
2599 val[1], ignore,
2600 DECL_FUNCTION_CODE (callee));
2601 break;
2602
2603 case BUILT_IN_STRNCPY_CHK:
2604 if (val[2] && is_gimple_val (val[2]))
2605 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2606 gimple_call_arg (stmt, 1),
2607 gimple_call_arg (stmt, 2),
2608 gimple_call_arg (stmt, 3),
2609 val[2]);
2610 break;
2611
2612 case BUILT_IN_SNPRINTF_CHK:
2613 case BUILT_IN_VSNPRINTF_CHK:
2614 if (val[1] && is_gimple_val (val[1]))
2615 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2616 DECL_FUNCTION_CODE (callee));
2617 break;
2618
2619 default:
2620 gcc_unreachable ();
2621 }
2622
2623 if (result && ignore)
2624 result = fold_ignored_result (result);
2625 return result;
2626 }
2627
2628 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2629 replacement rhs for the statement or NULL_TREE if no simplification
2630 could be made. It is assumed that the operands have been previously
2631 folded. */
2632
2633 static tree
2634 fold_gimple_assign (gimple_stmt_iterator *si)
2635 {
2636 gimple stmt = gsi_stmt (*si);
2637 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2638
2639 tree result = NULL;
2640
2641 switch (get_gimple_rhs_class (subcode))
2642 {
2643 case GIMPLE_SINGLE_RHS:
2644 {
2645 tree rhs = gimple_assign_rhs1 (stmt);
2646
2647 /* Try to fold a conditional expression. */
2648 if (TREE_CODE (rhs) == COND_EXPR)
2649 {
2650 tree temp = fold (COND_EXPR_COND (rhs));
2651 if (temp != COND_EXPR_COND (rhs))
2652 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2653 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2654 }
2655
2656 /* If we couldn't fold the RHS, hand over to the generic
2657 fold routines. */
2658 if (result == NULL_TREE)
2659 result = fold (rhs);
2660
2661 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2662 that may have been added by fold, and "useless" type
2663 conversions that might now be apparent due to propagation. */
2664 STRIP_USELESS_TYPE_CONVERSION (result);
2665
2666 if (result != rhs && valid_gimple_rhs_p (result))
2667 return result;
2668 else
2669 /* It is possible that fold_stmt_r simplified the RHS.
2670 Make sure that the subcode of this statement still
2671 reflects the principal operator of the rhs operand. */
2672 return rhs;
2673 }
2674 break;
2675
2676 case GIMPLE_UNARY_RHS:
2677 {
2678 tree rhs = gimple_assign_rhs1 (stmt);
2679
2680 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2681 if (result)
2682 {
2683 /* If the operation was a conversion do _not_ mark a
2684 resulting constant with TREE_OVERFLOW if the original
2685 constant was not. These conversions have implementation
2686 defined behavior and retaining the TREE_OVERFLOW flag
2687 here would confuse later passes such as VRP. */
2688 if (CONVERT_EXPR_CODE_P (subcode)
2689 && TREE_CODE (result) == INTEGER_CST
2690 && TREE_CODE (rhs) == INTEGER_CST)
2691 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2692
2693 STRIP_USELESS_TYPE_CONVERSION (result);
2694 if (valid_gimple_rhs_p (result))
2695 return result;
2696 }
2697 else if (CONVERT_EXPR_CODE_P (subcode)
2698 && POINTER_TYPE_P (gimple_expr_type (stmt))
2699 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2700 {
2701 tree type = gimple_expr_type (stmt);
2702 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2703 integer_zero_node, type);
2704 if (t)
2705 return t;
2706 }
2707 }
2708 break;
2709
2710 case GIMPLE_BINARY_RHS:
2711 /* Try to fold pointer addition. */
2712 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2713 result = maybe_fold_stmt_addition (
2714 TREE_TYPE (gimple_assign_lhs (stmt)),
2715 gimple_assign_rhs1 (stmt),
2716 gimple_assign_rhs2 (stmt));
2717
2718 if (!result)
2719 result = fold_binary (subcode,
2720 TREE_TYPE (gimple_assign_lhs (stmt)),
2721 gimple_assign_rhs1 (stmt),
2722 gimple_assign_rhs2 (stmt));
2723
2724 if (result)
2725 {
2726 STRIP_USELESS_TYPE_CONVERSION (result);
2727 if (valid_gimple_rhs_p (result))
2728 return result;
2729
2730 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2731 we lose canonicalization opportunities. Do not go again
2732 through fold here though, or the same non-GIMPLE will be
2733 produced. */
2734 if (commutative_tree_code (subcode)
2735 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2736 gimple_assign_rhs2 (stmt), false))
2737 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2738 gimple_assign_rhs2 (stmt),
2739 gimple_assign_rhs1 (stmt));
2740 }
2741 break;
2742
2743 case GIMPLE_INVALID_RHS:
2744 gcc_unreachable ();
2745 }
2746
2747 return NULL_TREE;
2748 }
2749
2750 /* Attempt to fold a conditional statement. Return true if any changes were
2751 made. We only attempt to fold the condition expression, and do not perform
2752 any transformation that would require alteration of the cfg. It is
2753 assumed that the operands have been previously folded. */
2754
2755 static bool
2756 fold_gimple_cond (gimple stmt)
2757 {
2758 tree result = fold_binary (gimple_cond_code (stmt),
2759 boolean_type_node,
2760 gimple_cond_lhs (stmt),
2761 gimple_cond_rhs (stmt));
2762
2763 if (result)
2764 {
2765 STRIP_USELESS_TYPE_CONVERSION (result);
2766 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2767 {
2768 gimple_cond_set_condition_from_tree (stmt, result);
2769 return true;
2770 }
2771 }
2772
2773 return false;
2774 }
2775
2776
2777 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2778 The statement may be replaced by another statement, e.g., if the call
2779 simplifies to a constant value. Return true if any changes were made.
2780 It is assumed that the operands have been previously folded. */
2781
2782 static bool
2783 fold_gimple_call (gimple_stmt_iterator *gsi)
2784 {
2785 gimple stmt = gsi_stmt (*gsi);
2786
2787 tree callee = gimple_call_fndecl (stmt);
2788
2789 /* Check for builtins that CCP can handle using information not
2790 available in the generic fold routines. */
2791 if (callee && DECL_BUILT_IN (callee))
2792 {
2793 tree result = ccp_fold_builtin (stmt);
2794
2795 if (result)
2796 return update_call_from_tree (gsi, result);
2797 }
2798 else
2799 {
2800 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2801 here are when we've propagated the address of a decl into the
2802 object slot. */
2803 /* ??? Should perhaps do this in fold proper. However, doing it
2804 there requires that we create a new CALL_EXPR, and that requires
2805 copying EH region info to the new node. Easier to just do it
2806 here where we can just smash the call operand. */
2807 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2808 callee = gimple_call_fn (stmt);
2809 if (TREE_CODE (callee) == OBJ_TYPE_REF
2810 && lang_hooks.fold_obj_type_ref
2811 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2812 && DECL_P (TREE_OPERAND
2813 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2814 {
2815 tree t;
2816
2817 /* ??? Caution: Broken ADDR_EXPR semantics means that
2818 looking at the type of the operand of the addr_expr
2819 can yield an array type. See silly exception in
2820 check_pointer_types_r. */
2821 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2822 t = lang_hooks.fold_obj_type_ref (callee, t);
2823 if (t)
2824 {
2825 gimple_call_set_fn (stmt, t);
2826 return true;
2827 }
2828 }
2829 }
2830
2831 return false;
2832 }
2833
2834 /* Fold the statement pointed to by GSI. In some cases, this function may
2835 replace the whole statement with a new one. Returns true iff folding
2836 makes any changes. */
2837
2838 bool
2839 fold_stmt (gimple_stmt_iterator *gsi)
2840 {
2841 tree res;
2842 struct fold_stmt_r_data fold_stmt_r_data;
2843 struct walk_stmt_info wi;
2844
2845 bool changed = false;
2846 bool inside_addr_expr = false;
2847
2848 gimple stmt = gsi_stmt (*gsi);
2849
2850 fold_stmt_r_data.stmt = stmt;
2851 fold_stmt_r_data.changed_p = &changed;
2852 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2853
2854 memset (&wi, 0, sizeof (wi));
2855 wi.info = &fold_stmt_r_data;
2856
2857 /* Fold the individual operands.
2858 For example, fold instances of *&VAR into VAR, etc. */
2859 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2860 gcc_assert (!res);
2861
2862 /* Fold the main computation performed by the statement. */
2863 switch (gimple_code (stmt))
2864 {
2865 case GIMPLE_ASSIGN:
2866 {
2867 tree new_rhs = fold_gimple_assign (gsi);
2868 if (new_rhs != NULL_TREE)
2869 {
2870 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2871 changed = true;
2872 }
2873 stmt = gsi_stmt (*gsi);
2874 break;
2875 }
2876 case GIMPLE_COND:
2877 changed |= fold_gimple_cond (stmt);
2878 break;
2879 case GIMPLE_CALL:
2880 /* The entire statement may be replaced in this case. */
2881 changed |= fold_gimple_call (gsi);
2882 break;
2883
2884 default:
2885 return changed;
2886 break;
2887 }
2888
2889 return changed;
2890 }
2891
2892 /* Perform the minimal folding on statement STMT. Only operations like
2893 *&x created by constant propagation are handled. The statement cannot
2894 be replaced with a new one. Return true if the statement was
2895 changed, false otherwise. */
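/* A minimal sketch, for illustration only: x_1 = *&y_2 can be folded
   in place to x_1 = y_2 because the new right-hand side needs no more
   operand slots than the old one, whereas foldings that would grow or
   replace the statement are rejected here and left to fold_stmt.  */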
2896
2897 bool
2898 fold_stmt_inplace (gimple stmt)
2899 {
2900 tree res;
2901 struct fold_stmt_r_data fold_stmt_r_data;
2902 struct walk_stmt_info wi;
2903 gimple_stmt_iterator si;
2904
38965eb2 2905 bool changed = false;
622f91ba
JL
2906 bool inside_addr_expr = false;
2907
6ac01510 2908 fold_stmt_r_data.stmt = stmt;
622f91ba
JL
2909 fold_stmt_r_data.changed_p = &changed;
2910 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
38965eb2 2911
726a989a
RB
2912 memset (&wi, 0, sizeof (wi));
2913 wi.info = &fold_stmt_r_data;
2914
2915 /* Fold the individual operands.
2916 For example, fold instances of *&VAR into VAR, etc.
38965eb2 2917
726a989a
RB
2918 It appears that, at one time, maybe_fold_stmt_indirect
2919 would cause the walk to return non-null in order to
2920 signal that the entire statement should be replaced with
2921 a call to _builtin_trap. This functionality is currently
2922 disabled, as noted in a FIXME, and cannot be supported here. */
2923 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2924 gcc_assert (!res);
2925
2926 /* Fold the main computation performed by the statement. */
2927 switch (gimple_code (stmt))
2928 {
2929 case GIMPLE_ASSIGN:
2930 {
2931 unsigned old_num_ops;
2932 tree new_rhs;
2933 old_num_ops = gimple_num_ops (stmt);
2934 si = gsi_for_stmt (stmt);
2935 new_rhs = fold_gimple_assign (&si);
2936 if (new_rhs != NULL_TREE
2937 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2938 {
2939 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2940 changed = true;
2941 }
2942 gcc_assert (gsi_stmt (si) == stmt);
2943 break;
2944 }
2945 case GIMPLE_COND:
2946 changed |= fold_gimple_cond (stmt);
2947 break;
2948
2949 default:
2950 break;
2951 }
2952
2953 return changed;
2954 }
2955
2956 /* Try to optimize out __builtin_stack_restore. Optimize it out
2957 if there is another __builtin_stack_restore in the same basic
2958 block and no calls or ASM_EXPRs are in between, or if this block's
2959 only outgoing edge is to EXIT_BLOCK and there are no calls or
2960 ASM_EXPRs after this __builtin_stack_restore. */
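/* A minimal sketch, for illustration only: for two adjacent
   variable-length-array blocks

     { char v[n]; use (v); }
     { char w[n]; use (w); }

   gimplification emits a __builtin_stack_save/__builtin_stack_restore
   pair per block; the first restore is followed by the second with no
   call or asm in between, so it is redundant and can be removed.  */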
2961
2962 static tree
2963 optimize_stack_restore (gimple_stmt_iterator i)
2964 {
2965 tree callee, rhs;
2966 gimple stmt, stack_save;
2967 gimple_stmt_iterator stack_save_gsi;
2968
2969 basic_block bb = gsi_bb (i);
2970 gimple call = gsi_stmt (i);
2971
2972 if (gimple_code (call) != GIMPLE_CALL
2973 || gimple_call_num_args (call) != 1
2974 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2975 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
cb8e078d
JJ
2976 return NULL_TREE;
2977
726a989a 2978 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
cb8e078d 2979 {
726a989a
RB
2980 stmt = gsi_stmt (i);
2981 if (gimple_code (stmt) == GIMPLE_ASM)
cb8e078d 2982 return NULL_TREE;
726a989a 2983 if (gimple_code (stmt) != GIMPLE_CALL)
cb8e078d
JJ
2984 continue;
2985
726a989a 2986 callee = gimple_call_fndecl (stmt);
cb8e078d
JJ
2987 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2988 return NULL_TREE;
2989
2990 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2991 break;
2992 }
2993
2994 if (gsi_end_p (i)
2995 && (! single_succ_p (bb)
2996 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
2997 return NULL_TREE;
2998
2999 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3000 if (gimple_code (stack_save) != GIMPLE_CALL
3001 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3002 || stmt_could_throw_p (stack_save)
3003 || !has_single_use (gimple_call_arg (call, 0)))
cb8e078d
JJ
3004 return NULL_TREE;
3005
726a989a 3006 callee = gimple_call_fndecl (stack_save);
cb8e078d
JJ
3007 if (!callee
3008 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3009 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3010 || gimple_call_num_args (stack_save) != 0)
3011 return NULL_TREE;
3012
3013 stack_save_gsi = gsi_for_stmt (stack_save);
3014 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3015 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3016 if (!update_call_from_tree (&stack_save_gsi, rhs))
3017 {
3018 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3019 return NULL_TREE;
3020 }
3021 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3022
3023 /* No effect, so the statement will be deleted. */
3024 return integer_zero_node;
3025 }
3026
3027 /* If va_list type is a simple pointer and nothing special is needed,
3028 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3029 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
3030 pointer assignment. */
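/* A minimal sketch, for illustration only, on a target whose va_list
   is a plain pointer:

     __builtin_va_start (&ap, 0)  =>  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s)    =>  d = s;
     __builtin_va_end (&ap)       =>  (removed)  */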
3031
3032 static tree
3033 optimize_stdarg_builtin (gimple call)
3034 {
3035 tree callee, lhs, rhs, cfun_va_list;
3036 bool va_list_simple_ptr;
3037
3038 if (gimple_code (call) != GIMPLE_CALL)
3039 return NULL_TREE;
3040
3041 callee = gimple_call_fndecl (call);
3042
3043 cfun_va_list = targetm.fn_abi_va_list (callee);
3044 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3045 && (TREE_TYPE (cfun_va_list) == void_type_node
3046 || TREE_TYPE (cfun_va_list) == char_type_node);
3047
3048 switch (DECL_FUNCTION_CODE (callee))
3049 {
3050 case BUILT_IN_VA_START:
3051 if (!va_list_simple_ptr
3052 || targetm.expand_builtin_va_start != NULL
3053 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3054 return NULL_TREE;
3055
3056 if (gimple_call_num_args (call) != 2)
3057 return NULL_TREE;
3058
3059 lhs = gimple_call_arg (call, 0);
3060 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3061 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3062 != TYPE_MAIN_VARIANT (cfun_va_list))
3063 return NULL_TREE;
3064
3065 lhs = build_fold_indirect_ref (lhs);
3066 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3067 1, integer_zero_node);
3068 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3069 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3070
3071 case BUILT_IN_VA_COPY:
3072 if (!va_list_simple_ptr)
3073 return NULL_TREE;
3074
726a989a 3075 if (gimple_call_num_args (call) != 2)
d7bd8aeb
JJ
3076 return NULL_TREE;
3077
3078 lhs = gimple_call_arg (call, 0);
3079 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3080 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3081 != TYPE_MAIN_VARIANT (cfun_va_list))
3082 return NULL_TREE;
3083
3084 lhs = build_fold_indirect_ref (lhs);
3085 rhs = gimple_call_arg (call, 1);
3086 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3087 != TYPE_MAIN_VARIANT (cfun_va_list))
3088 return NULL_TREE;
3089
3090 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3091 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3092
3093 case BUILT_IN_VA_END:
3094 /* No effect, so the statement will be deleted. */
3095 return integer_zero_node;
3096
3097 default:
3098 gcc_unreachable ();
3099 }
3100 }
3101
3102 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3103 RHS of an assignment. Insert the necessary statements before
3104 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
3105 is replaced. If the call is expected to produce a result, then it
3106 is replaced by an assignment of the new RHS to the result variable.
3107 If the result is to be ignored, then the call is replaced by a
3108 GIMPLE_NOP. */
3109
3110 static void
3111 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3112 {
3113 tree lhs;
3114 tree tmp = NULL_TREE; /* Silence warning. */
3115 gimple stmt, new_stmt;
3116 gimple_stmt_iterator i;
3117 gimple_seq stmts = gimple_seq_alloc();
3118 struct gimplify_ctx gctx;
3119
3120 stmt = gsi_stmt (*si_p);
3121
3122 gcc_assert (is_gimple_call (stmt));
3123
3124 lhs = gimple_call_lhs (stmt);
3125
3126 push_gimplify_context (&gctx);
3127
3128 if (lhs == NULL_TREE)
3129 gimplify_and_add (expr, &stmts);
3130 else
3131 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3132
3133 pop_gimplify_context (NULL);
3134
3135 if (gimple_has_location (stmt))
3136 annotate_all_with_location (stmts, gimple_location (stmt));
3137
3138 /* The replacement can expose previously unreferenced variables. */
3139 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3140 {
3141 new_stmt = gsi_stmt (i);
3142 find_new_referenced_vars (new_stmt);
3143 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3144 mark_symbols_for_renaming (new_stmt);
3145 gsi_next (si_p);
3146 }
3147
3148 if (lhs == NULL_TREE)
3149 new_stmt = gimple_build_nop ();
3150 else
3151 {
3152 new_stmt = gimple_build_assign (lhs, tmp);
3153 copy_virtual_operands (new_stmt, stmt);
3154 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3155 }
3156
3157 gimple_set_location (new_stmt, gimple_location (stmt));
3158 gsi_replace (si_p, new_stmt, false);
3159 }
3160
3161 /* A simple pass that attempts to fold all builtin functions. This pass
3162 is run after we've propagated as many constants as we can. */
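/* A minimal sketch, for illustration only: after constant propagation,

     n_1 = strlen ("abc");

   folds here to n_1 = 3, and a __builtin_constant_p call still
   unresolved at this point is folded to 0.  */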
3163
3164 static unsigned int
3165 execute_fold_all_builtins (void)
3166 {
3167 bool cfg_changed = false;
3168 basic_block bb;
3169 unsigned int todoflags = 0;
3170
3171 FOR_EACH_BB (bb)
3172 {
3173 gimple_stmt_iterator i;
3174 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3175 {
3176 gimple stmt, old_stmt;
3177 tree callee, result;
3178 enum built_in_function fcode;
3179
3180 stmt = gsi_stmt (i);
3181
3182 if (gimple_code (stmt) != GIMPLE_CALL)
3183 {
3184 gsi_next (&i);
3185 continue;
3186 }
3187 callee = gimple_call_fndecl (stmt);
3188 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3189 {
3190 gsi_next (&i);
3191 continue;
3192 }
3193 fcode = DECL_FUNCTION_CODE (callee);
3194
3195 result = ccp_fold_builtin (stmt);
3196
3197 if (result)
3198 gimple_remove_stmt_histograms (cfun, stmt);
3199
3200 if (!result)
3201 switch (DECL_FUNCTION_CODE (callee))
3202 {
3203 case BUILT_IN_CONSTANT_P:
3204 /* Resolve __builtin_constant_p. If it hasn't been
3205 folded to integer_one_node by now, it's fairly
3206 certain that the value simply isn't constant. */
3207 result = integer_zero_node;
3208 break;
3209
3210 case BUILT_IN_STACK_RESTORE:
3211 result = optimize_stack_restore (i);
3212 if (result)
3213 break;
3214 gsi_next (&i);
3215 continue;
3216
3217 case BUILT_IN_VA_START:
3218 case BUILT_IN_VA_END:
3219 case BUILT_IN_VA_COPY:
3220 /* These shouldn't be folded before pass_stdarg. */
3221 result = optimize_stdarg_builtin (stmt);
3222 if (result)
3223 break;
3224 /* FALLTHRU */
3225
3226 default:
3227 gsi_next (&i);
3228 continue;
3229 }
3230
3231 if (dump_file && (dump_flags & TDF_DETAILS))
3232 {
3233 fprintf (dump_file, "Simplified\n ");
3234 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3235 }
3236
3237 old_stmt = stmt;
3238 push_stmt_changes (gsi_stmt_ptr (&i));
3239
3240 if (!update_call_from_tree (&i, result))
3241 {
3242 gimplify_and_update_call_from_tree (&i, result);
3243 todoflags |= TODO_rebuild_alias;
3244 }
3245
3246 stmt = gsi_stmt (i);
3247 pop_stmt_changes (gsi_stmt_ptr (&i));
3248
3249 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3250 && gimple_purge_dead_eh_edges (bb))
3251 cfg_changed = true;
3252
3253 if (dump_file && (dump_flags & TDF_DETAILS))
3254 {
3255 fprintf (dump_file, "to\n ");
3256 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3257 fprintf (dump_file, "\n");
3258 }
3259
3260 /* Retry the same statement if it changed into another
3261 builtin; there might be new opportunities now. */
3262 if (gimple_code (stmt) != GIMPLE_CALL)
3263 {
3264 gsi_next (&i);
3265 continue;
3266 }
3267 callee = gimple_call_fndecl (stmt);
3268 if (!callee
3269 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3270 || DECL_FUNCTION_CODE (callee) == fcode)
3271 gsi_next (&i);
3272 }
3273 }
3274
3275 /* Delete unreachable blocks. */
3276 if (cfg_changed)
3277 todoflags |= TODO_cleanup_cfg;
3278
3279 return todoflags;
3280 }
3281
3282
3283 struct gimple_opt_pass pass_fold_builtins =
3284 {
3285 {
3286 GIMPLE_PASS,
3287 "fab", /* name */
3288 NULL, /* gate */
3289 execute_fold_all_builtins, /* execute */
3290 NULL, /* sub */
3291 NULL, /* next */
3292 0, /* static_pass_number */
3293 0, /* tv_id */
3294 PROP_cfg | PROP_ssa, /* properties_required */
3295 0, /* properties_provided */
3296 0, /* properties_destroyed */
3297 0, /* todo_flags_start */
3298 TODO_dump_func
3299 | TODO_verify_ssa
3300 | TODO_update_ssa /* todo_flags_finish */
3301 }
3302 };