]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-ccp.c
avr.c (avr_arch_types): Add avr6 entry.
[thirdparty/gcc.git] / gcc / tree-ssa-ccp.c
CommitLineData
6de9cd9a 1/* Conditional constant propagation pass for the GNU compiler.
6ac01510 2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
1e052c19 3 Free Software Foundation, Inc.
6de9cd9a
DN
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it
10under the terms of the GNU General Public License as published by the
9dcd6f09 11Free Software Foundation; either version 3, or (at your option) any
6de9cd9a
DN
12later version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT
15ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
9dcd6f09
NC
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
6de9cd9a 22
0bca51f0
DN
23/* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
30
106dec71
ZD
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
0bca51f0
DN
38
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
43
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
46
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
50
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
61
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
66
67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
73
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
76
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
82
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of consider it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
87
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
95
96
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
101
102
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
105
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
112
113 For instance, consider the following code fragment:
114
115 struct A a;
116 const int B = 42;
117
118 void foo (int i)
119 {
120 if (i > 10)
121 a.a = 42;
122 else
123 {
124 a.b = 21;
125 a.a = a.b + 21;
126 }
127
128 if (a.a != B)
129 never_executed ();
130 }
131
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
38635499
DN
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
0bca51f0 138
38635499 139 # a_5 = VDEF <a_4>
0bca51f0
DN
140 a.a = 2;
141
142 # VUSE <a_5>
143 x_3 = a.b;
144
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
148
106dec71
ZD
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
0bca51f0
DN
151
152 int A;
153
154 foo (int i)
155 {
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
159
160 # VUSE <A_5>
161 A.0_6 = A;
162
163 return A.0_6;
164 }
165
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
106dec71 168 would erroneously optimize the above into 'return 3;'.
0bca51f0
DN
169
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
6de9cd9a
DN
176
177 References:
178
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
181
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
184
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
187
188#include "config.h"
189#include "system.h"
190#include "coretypes.h"
191#include "tm.h"
6de9cd9a 192#include "tree.h"
750628d8 193#include "flags.h"
6de9cd9a
DN
194#include "rtl.h"
195#include "tm_p.h"
750628d8 196#include "ggc.h"
6de9cd9a 197#include "basic-block.h"
750628d8 198#include "output.h"
750628d8
DN
199#include "expr.h"
200#include "function.h"
6de9cd9a 201#include "diagnostic.h"
750628d8 202#include "timevar.h"
6de9cd9a 203#include "tree-dump.h"
750628d8 204#include "tree-flow.h"
6de9cd9a 205#include "tree-pass.h"
750628d8
DN
206#include "tree-ssa-propagate.h"
207#include "langhooks.h"
ae3df618 208#include "target.h"
6ac01510 209#include "toplev.h"
6de9cd9a
DN
210
211
212/* Possible lattice values. */
213typedef enum
214{
106dec71 215 UNINITIALIZED,
6de9cd9a
DN
216 UNDEFINED,
217 CONSTANT,
218 VARYING
0bca51f0 219} ccp_lattice_t;
6de9cd9a 220
0bca51f0
DN
221/* Array of propagated constant values. After propagation,
222 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
223 the constant is held in an SSA name representing a memory store
38635499
DN
224 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
225 memory reference used to store (i.e., the LHS of the assignment
226 doing the store). */
404f4351 227static prop_value_t *const_val;
6de9cd9a 228
0bca51f0
DN
229/* True if we are also propagating constants in stores and loads. */
230static bool do_store_ccp;
6de9cd9a 231
0bca51f0 232/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
95eec0d6
DB
233
234static void
0bca51f0 235dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
95eec0d6 236{
750628d8 237 switch (val.lattice_val)
95eec0d6 238 {
0bca51f0
DN
239 case UNINITIALIZED:
240 fprintf (outf, "%sUNINITIALIZED", prefix);
241 break;
750628d8
DN
242 case UNDEFINED:
243 fprintf (outf, "%sUNDEFINED", prefix);
244 break;
245 case VARYING:
246 fprintf (outf, "%sVARYING", prefix);
247 break;
750628d8
DN
248 case CONSTANT:
249 fprintf (outf, "%sCONSTANT ", prefix);
0bca51f0 250 print_generic_expr (outf, val.value, dump_flags);
750628d8
DN
251 break;
252 default:
1e128c5f 253 gcc_unreachable ();
750628d8 254 }
95eec0d6 255}
6de9cd9a 256
6de9cd9a 257
0bca51f0
DN
258/* Print lattice value VAL to stderr. */
259
260void debug_lattice_value (prop_value_t val);
261
262void
263debug_lattice_value (prop_value_t val)
264{
265 dump_lattice_value (stderr, "", val);
266 fprintf (stderr, "\n");
267}
6de9cd9a 268
6de9cd9a 269
688e936d
RH
270/* The regular is_gimple_min_invariant does a shallow test of the object.
271 It assumes that full gimplification has happened, or will happen on the
272 object. For a value coming from DECL_INITIAL, this is not true, so we
6416ae7f 273 have to be more strict ourselves. */
688e936d
RH
274
275static bool
276ccp_decl_initial_min_invariant (tree t)
277{
278 if (!is_gimple_min_invariant (t))
279 return false;
280 if (TREE_CODE (t) == ADDR_EXPR)
281 {
282 /* Inline and unroll is_gimple_addressable. */
283 while (1)
284 {
285 t = TREE_OPERAND (t, 0);
286 if (is_gimple_id (t))
287 return true;
288 if (!handled_component_p (t))
289 return false;
290 }
291 }
292 return true;
293}
294
fc9962ee
ZD
295/* If SYM is a constant variable with known value, return the value.
296 NULL_TREE is returned otherwise. */
297
298static tree
299get_symbol_constant_value (tree sym)
300{
301 if (TREE_STATIC (sym)
302 && TREE_READONLY (sym)
725c2d32
L
303 && !MTAG_P (sym)
304 /* Check if a read-only definition may be overridden at
305 link and run time. */
306 && targetm.binds_local_p (sym))
fc9962ee
ZD
307 {
308 tree val = DECL_INITIAL (sym);
309 if (val
310 && ccp_decl_initial_min_invariant (val))
311 return val;
b0940154
AP
312 /* Variables declared 'const' without an initializer
313 have zero as the intializer. */
314 if (!val
315 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
316 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
317 return fold_convert (TREE_TYPE (sym), integer_zero_node);
fc9962ee
ZD
318 }
319
320 return NULL_TREE;
321}
688e936d 322
0bca51f0
DN
323/* Compute a default value for variable VAR and store it in the
324 CONST_VAL array. The following rules are used to get default
325 values:
95eec0d6 326
0bca51f0
DN
327 1- Global and static variables that are declared constant are
328 considered CONSTANT.
329
330 2- Any other value is considered UNDEFINED. This is useful when
750628d8
DN
331 considering PHI nodes. PHI arguments that are undefined do not
332 change the constant value of the PHI node, which allows for more
0bca51f0 333 constants to be propagated.
6de9cd9a 334
0bca51f0
DN
335 3- If SSA_NAME_VALUE is set and it is a constant, its value is
336 used.
6de9cd9a 337
0bca51f0
DN
338 4- Variables defined by statements other than assignments and PHI
339 nodes are considered VARYING.
6de9cd9a 340
106dec71
ZD
341 5- Initial values of variables that are not GIMPLE registers are
342 considered VARYING. */
6de9cd9a 343
0bca51f0
DN
344static prop_value_t
345get_default_value (tree var)
346{
347 tree sym = SSA_NAME_VAR (var);
348 prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
fc9962ee 349 tree cst_val;
106dec71 350
0bca51f0 351 if (!do_store_ccp && !is_gimple_reg (var))
6de9cd9a 352 {
0bca51f0
DN
353 /* Short circuit for regular CCP. We are not interested in any
354 non-register when DO_STORE_CCP is false. */
750628d8 355 val.lattice_val = VARYING;
6de9cd9a 356 }
0bca51f0
DN
357 else if (SSA_NAME_VALUE (var)
358 && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
750628d8 359 {
0bca51f0
DN
360 val.lattice_val = CONSTANT;
361 val.value = SSA_NAME_VALUE (var);
750628d8 362 }
fc9962ee 363 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
750628d8 364 {
0bca51f0
DN
365 /* Globals and static variables declared 'const' take their
366 initial value. */
367 val.lattice_val = CONSTANT;
fc9962ee 368 val.value = cst_val;
0bca51f0 369 val.mem_ref = sym;
750628d8
DN
370 }
371 else
372 {
750628d8 373 tree stmt = SSA_NAME_DEF_STMT (var);
6de9cd9a 374
0bca51f0
DN
375 if (IS_EMPTY_STMT (stmt))
376 {
377 /* Variables defined by an empty statement are those used
378 before being initialized. If VAR is a local variable, we
106dec71
ZD
379 can assume initially that it is UNDEFINED, otherwise we must
380 consider it VARYING. */
0bca51f0
DN
381 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
382 val.lattice_val = UNDEFINED;
0bca51f0 383 else
750628d8
DN
384 val.lattice_val = VARYING;
385 }
07beea0d 386 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
0bca51f0
DN
387 || TREE_CODE (stmt) == PHI_NODE)
388 {
389 /* Any other variable defined by an assignment or a PHI node
106dec71
ZD
390 is considered UNDEFINED. */
391 val.lattice_val = UNDEFINED;
0bca51f0
DN
392 }
393 else
394 {
395 /* Otherwise, VAR will never take on a constant value. */
396 val.lattice_val = VARYING;
397 }
750628d8 398 }
6de9cd9a 399
750628d8
DN
400 return val;
401}
6de9cd9a 402
6de9cd9a 403
106dec71 404/* Get the constant value associated with variable VAR. */
6de9cd9a 405
106dec71
ZD
406static inline prop_value_t *
407get_value (tree var)
0bca51f0
DN
408{
409 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
106dec71
ZD
410
411 if (val->lattice_val == UNINITIALIZED)
6de9cd9a
DN
412 *val = get_default_value (var);
413
414 return val;
415}
416
106dec71
ZD
417/* Sets the value associated with VAR to VARYING. */
418
419static inline void
420set_value_varying (tree var)
421{
422 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
423
424 val->lattice_val = VARYING;
425 val->value = NULL_TREE;
426 val->mem_ref = NULL_TREE;
427}
6de9cd9a 428
fbb5445b
L
429/* For float types, modify the value of VAL to make ccp work correctly
430 for non-standard values (-0, NaN):
431
432 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
433 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
434 This is to fix the following problem (see PR 29921): Suppose we have
435
436 x = 0.0 * y
437
438 and we set value of y to NaN. This causes value of x to be set to NaN.
439 When we later determine that y is in fact VARYING, fold uses the fact
440 that HONOR_NANS is false, and we try to change the value of x to 0,
441 causing an ICE. With HONOR_NANS being false, the real appearance of
442 NaN would cause undefined behavior, though, so claiming that y (and x)
443 are UNDEFINED initially is correct. */
444
445static void
446canonicalize_float_value (prop_value_t *val)
447{
448 enum machine_mode mode;
449 tree type;
450 REAL_VALUE_TYPE d;
451
452 if (val->lattice_val != CONSTANT
453 || TREE_CODE (val->value) != REAL_CST)
454 return;
455
456 d = TREE_REAL_CST (val->value);
457 type = TREE_TYPE (val->value);
458 mode = TYPE_MODE (type);
459
460 if (!HONOR_SIGNED_ZEROS (mode)
461 && REAL_VALUE_MINUS_ZERO (d))
462 {
463 val->value = build_real (type, dconst0);
464 return;
465 }
466
467 if (!HONOR_NANS (mode)
468 && REAL_VALUE_ISNAN (d))
469 {
470 val->lattice_val = UNDEFINED;
471 val->value = NULL;
472 val->mem_ref = NULL;
473 return;
474 }
475}
476
0bca51f0
DN
477/* Set the value for variable VAR to NEW_VAL. Return true if the new
478 value is different from VAR's previous value. */
6de9cd9a 479
750628d8 480static bool
0bca51f0 481set_lattice_value (tree var, prop_value_t new_val)
6de9cd9a 482{
106dec71 483 prop_value_t *old_val = get_value (var);
0bca51f0 484
fbb5445b
L
485 canonicalize_float_value (&new_val);
486
0bca51f0 487 /* Lattice transitions must always be monotonically increasing in
106dec71
ZD
488 value. If *OLD_VAL and NEW_VAL are the same, return false to
489 inform the caller that this was a non-transition. */
490
fc9962ee 491 gcc_assert (old_val->lattice_val < new_val.lattice_val
0bca51f0 492 || (old_val->lattice_val == new_val.lattice_val
fc9962ee
ZD
493 && ((!old_val->value && !new_val.value)
494 || operand_equal_p (old_val->value, new_val.value, 0))
106dec71 495 && old_val->mem_ref == new_val.mem_ref));
0bca51f0
DN
496
497 if (old_val->lattice_val != new_val.lattice_val)
6de9cd9a 498 {
750628d8
DN
499 if (dump_file && (dump_flags & TDF_DETAILS))
500 {
0bca51f0 501 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
106dec71 502 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
750628d8
DN
503 }
504
0bca51f0
DN
505 *old_val = new_val;
506
106dec71
ZD
507 gcc_assert (new_val.lattice_val != UNDEFINED);
508 return true;
6de9cd9a 509 }
750628d8
DN
510
511 return false;
6de9cd9a
DN
512}
513
514
0bca51f0 515/* Return the likely CCP lattice value for STMT.
6de9cd9a 516
750628d8 517 If STMT has no operands, then return CONSTANT.
6de9cd9a 518
7f879c96
RG
519 Else if undefinedness of operands of STMT cause its value to be
520 undefined, then return UNDEFINED.
6de9cd9a 521
750628d8 522 Else if any operands of STMT are constants, then return CONSTANT.
6de9cd9a 523
750628d8 524 Else return VARYING. */
6de9cd9a 525
0bca51f0 526static ccp_lattice_t
750628d8
DN
527likely_value (tree stmt)
528{
7f879c96 529 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
750628d8
DN
530 stmt_ann_t ann;
531 tree use;
532 ssa_op_iter iter;
6de9cd9a 533
750628d8 534 ann = stmt_ann (stmt);
0bca51f0
DN
535
536 /* If the statement has volatile operands, it won't fold to a
537 constant value. */
538 if (ann->has_volatile_ops)
539 return VARYING;
540
541 /* If we are not doing store-ccp, statements with loads
542 and/or stores will never fold into a constant. */
543 if (!do_store_ccp
ff88c5aa 544 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
750628d8 545 return VARYING;
6de9cd9a 546
0bca51f0
DN
547
548 /* A CALL_EXPR is assumed to be varying. NOTE: This may be overly
549 conservative, in the presence of const and pure calls. */
750628d8
DN
550 if (get_call_expr_in (stmt) != NULL_TREE)
551 return VARYING;
6de9cd9a 552
0bca51f0
DN
553 /* Anything other than assignments and conditional jumps are not
554 interesting for CCP. */
07beea0d 555 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
106dec71 556 && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
0bca51f0
DN
557 && TREE_CODE (stmt) != COND_EXPR
558 && TREE_CODE (stmt) != SWITCH_EXPR)
559 return VARYING;
560
a318e3ac
SB
561 if (is_gimple_min_invariant (get_rhs (stmt)))
562 return CONSTANT;
563
106dec71 564 has_constant_operand = false;
7f879c96
RG
565 has_undefined_operand = false;
566 all_undefined_operands = true;
106dec71 567 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
750628d8 568 {
106dec71 569 prop_value_t *val = get_value (use);
750628d8 570
106dec71 571 if (val->lattice_val == UNDEFINED)
7f879c96
RG
572 has_undefined_operand = true;
573 else
574 all_undefined_operands = false;
0bca51f0 575
750628d8 576 if (val->lattice_val == CONSTANT)
106dec71 577 has_constant_operand = true;
6de9cd9a 578 }
750628d8 579
7f879c96
RG
580 /* If the operation combines operands like COMPLEX_EXPR make sure to
581 not mark the result UNDEFINED if only one part of the result is
582 undefined. */
583 if (has_undefined_operand
584 && all_undefined_operands)
585 return UNDEFINED;
586 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
587 && has_undefined_operand)
588 {
589 switch (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)))
590 {
591 /* Unary operators are handled with all_undefined_operands. */
592 case PLUS_EXPR:
593 case MINUS_EXPR:
7f879c96 594 case POINTER_PLUS_EXPR:
7f879c96
RG
595 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
596 Not bitwise operators, one VARYING operand may specify the
597 result completely. Not logical operators for the same reason.
0cedb9e9
RG
598 Not COMPLEX_EXPR as one VARYING operand makes the result partly
599 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
600 the undefined operand may be promoted. */
7f879c96
RG
601 return UNDEFINED;
602
603 default:
604 ;
605 }
606 }
607 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
608 fall back to VARYING even if there were CONSTANT operands. */
609 if (has_undefined_operand)
610 return VARYING;
611
106dec71
ZD
612 if (has_constant_operand
613 /* We do not consider virtual operands here -- load from read-only
614 memory may have only VARYING virtual operands, but still be
615 constant. */
616 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
0bca51f0
DN
617 return CONSTANT;
618
106dec71 619 return VARYING;
6de9cd9a
DN
620}
621
106dec71
ZD
622/* Returns true if STMT cannot be constant. */
623
624static bool
625surely_varying_stmt_p (tree stmt)
626{
627 /* If the statement has operands that we cannot handle, it cannot be
628 constant. */
629 if (stmt_ann (stmt)->has_volatile_ops)
630 return true;
631
632 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
633 {
634 if (!do_store_ccp)
635 return true;
636
637 /* We can only handle simple loads and stores. */
638 if (!stmt_makes_single_load (stmt)
639 && !stmt_makes_single_store (stmt))
640 return true;
641 }
642
643 /* If it contains a call, it is varying. */
644 if (get_call_expr_in (stmt) != NULL_TREE)
645 return true;
646
647 /* Anything other than assignments and conditional jumps are not
648 interesting for CCP. */
07beea0d 649 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
106dec71
ZD
650 && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
651 && TREE_CODE (stmt) != COND_EXPR
652 && TREE_CODE (stmt) != SWITCH_EXPR)
653 return true;
654
655 return false;
656}
6de9cd9a 657
750628d8 658/* Initialize local data structures for CCP. */
6de9cd9a
DN
659
660static void
750628d8 661ccp_initialize (void)
6de9cd9a 662{
750628d8 663 basic_block bb;
6de9cd9a 664
b9eae1a9 665 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
6de9cd9a 666
750628d8
DN
667 /* Initialize simulation flags for PHI nodes and statements. */
668 FOR_EACH_BB (bb)
6de9cd9a 669 {
750628d8 670 block_stmt_iterator i;
6de9cd9a 671
750628d8
DN
672 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
673 {
750628d8 674 tree stmt = bsi_stmt (i);
106dec71 675 bool is_varying = surely_varying_stmt_p (stmt);
6de9cd9a 676
106dec71 677 if (is_varying)
750628d8 678 {
0bca51f0
DN
679 tree def;
680 ssa_op_iter iter;
681
682 /* If the statement will not produce a constant, mark
683 all its outputs VARYING. */
684 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
106dec71
ZD
685 {
686 if (is_varying)
687 set_value_varying (def);
688 }
750628d8
DN
689 }
690
750628d8
DN
691 DONT_SIMULATE_AGAIN (stmt) = is_varying;
692 }
6de9cd9a
DN
693 }
694
106dec71
ZD
695 /* Now process PHI nodes. We never set DONT_SIMULATE_AGAIN on phi node,
696 since we do not know which edges are executable yet, except for
697 phi nodes for virtual operands when we do not do store ccp. */
750628d8 698 FOR_EACH_BB (bb)
6de9cd9a 699 {
0bca51f0 700 tree phi;
750628d8
DN
701
702 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
703 {
106dec71
ZD
704 if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi)))
705 DONT_SIMULATE_AGAIN (phi) = true;
706 else
707 DONT_SIMULATE_AGAIN (phi) = false;
750628d8 708 }
6de9cd9a 709 }
750628d8 710}
6de9cd9a 711
6de9cd9a 712
0bca51f0 713/* Do final substitution of propagated values, cleanup the flowgraph and
3253eafb 714 free allocated storage.
6de9cd9a 715
3253eafb
JH
716 Return TRUE when something was optimized. */
717
718static bool
0bca51f0 719ccp_finalize (void)
6de9cd9a 720{
0bca51f0 721 /* Perform substitutions based on the known constant values. */
3253eafb 722 bool something_changed = substitute_and_fold (const_val, false);
6de9cd9a 723
0bca51f0 724 free (const_val);
3253eafb 725 return something_changed;;
6de9cd9a
DN
726}
727
728
0bca51f0
DN
729/* Compute the meet operator between *VAL1 and *VAL2. Store the result
730 in VAL1.
731
732 any M UNDEFINED = any
0bca51f0
DN
733 any M VARYING = VARYING
734 Ci M Cj = Ci if (i == j)
735 Ci M Cj = VARYING if (i != j)
106dec71 736 */
6de9cd9a
DN
737
738static void
0bca51f0 739ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
6de9cd9a 740{
0bca51f0 741 if (val1->lattice_val == UNDEFINED)
6de9cd9a 742 {
0bca51f0
DN
743 /* UNDEFINED M any = any */
744 *val1 = *val2;
750628d8 745 }
0bca51f0 746 else if (val2->lattice_val == UNDEFINED)
195da47b 747 {
0bca51f0
DN
748 /* any M UNDEFINED = any
749 Nothing to do. VAL1 already contains the value we want. */
750 ;
195da47b 751 }
0bca51f0
DN
752 else if (val1->lattice_val == VARYING
753 || val2->lattice_val == VARYING)
750628d8 754 {
0bca51f0
DN
755 /* any M VARYING = VARYING. */
756 val1->lattice_val = VARYING;
757 val1->value = NULL_TREE;
758 val1->mem_ref = NULL_TREE;
750628d8 759 }
0bca51f0
DN
760 else if (val1->lattice_val == CONSTANT
761 && val2->lattice_val == CONSTANT
762 && simple_cst_equal (val1->value, val2->value) == 1
763 && (!do_store_ccp
a318e3ac
SB
764 || (val1->mem_ref && val2->mem_ref
765 && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
750628d8 766 {
0bca51f0
DN
767 /* Ci M Cj = Ci if (i == j)
768 Ci M Cj = VARYING if (i != j)
769
770 If these two values come from memory stores, make sure that
771 they come from the same memory reference. */
772 val1->lattice_val = CONSTANT;
773 val1->value = val1->value;
774 val1->mem_ref = val1->mem_ref;
750628d8
DN
775 }
776 else
777 {
0bca51f0
DN
778 /* Any other combination is VARYING. */
779 val1->lattice_val = VARYING;
780 val1->value = NULL_TREE;
781 val1->mem_ref = NULL_TREE;
750628d8 782 }
6de9cd9a
DN
783}
784
785
750628d8
DN
786/* Loop through the PHI_NODE's parameters for BLOCK and compare their
787 lattice values to determine PHI_NODE's lattice value. The value of a
0bca51f0 788 PHI node is determined calling ccp_lattice_meet with all the arguments
750628d8 789 of the PHI node that are incoming via executable edges. */
6de9cd9a 790
750628d8
DN
791static enum ssa_prop_result
792ccp_visit_phi_node (tree phi)
6de9cd9a 793{
750628d8 794 int i;
0bca51f0 795 prop_value_t *old_val, new_val;
6de9cd9a 796
750628d8 797 if (dump_file && (dump_flags & TDF_DETAILS))
6de9cd9a 798 {
750628d8
DN
799 fprintf (dump_file, "\nVisiting PHI node: ");
800 print_generic_expr (dump_file, phi, dump_flags);
6de9cd9a 801 }
6de9cd9a 802
106dec71 803 old_val = get_value (PHI_RESULT (phi));
750628d8
DN
804 switch (old_val->lattice_val)
805 {
806 case VARYING:
0bca51f0 807 return SSA_PROP_VARYING;
6de9cd9a 808
750628d8
DN
809 case CONSTANT:
810 new_val = *old_val;
811 break;
6de9cd9a 812
750628d8 813 case UNDEFINED:
750628d8 814 new_val.lattice_val = UNDEFINED;
0bca51f0
DN
815 new_val.value = NULL_TREE;
816 new_val.mem_ref = NULL_TREE;
750628d8 817 break;
6de9cd9a 818
750628d8 819 default:
1e128c5f 820 gcc_unreachable ();
750628d8 821 }
6de9cd9a 822
750628d8
DN
823 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
824 {
0bca51f0
DN
825 /* Compute the meet operator over all the PHI arguments flowing
826 through executable edges. */
750628d8 827 edge e = PHI_ARG_EDGE (phi, i);
6de9cd9a 828
750628d8
DN
829 if (dump_file && (dump_flags & TDF_DETAILS))
830 {
831 fprintf (dump_file,
832 "\n Argument #%d (%d -> %d %sexecutable)\n",
833 i, e->src->index, e->dest->index,
834 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
835 }
836
837 /* If the incoming edge is executable, Compute the meet operator for
838 the existing value of the PHI node and the current PHI argument. */
839 if (e->flags & EDGE_EXECUTABLE)
840 {
0bca51f0
DN
841 tree arg = PHI_ARG_DEF (phi, i);
842 prop_value_t arg_val;
6de9cd9a 843
0bca51f0 844 if (is_gimple_min_invariant (arg))
750628d8 845 {
0bca51f0
DN
846 arg_val.lattice_val = CONSTANT;
847 arg_val.value = arg;
848 arg_val.mem_ref = NULL_TREE;
750628d8
DN
849 }
850 else
106dec71 851 arg_val = *(get_value (arg));
6de9cd9a 852
0bca51f0 853 ccp_lattice_meet (&new_val, &arg_val);
6de9cd9a 854
750628d8
DN
855 if (dump_file && (dump_flags & TDF_DETAILS))
856 {
857 fprintf (dump_file, "\t");
0bca51f0
DN
858 print_generic_expr (dump_file, arg, dump_flags);
859 dump_lattice_value (dump_file, "\tValue: ", arg_val);
750628d8
DN
860 fprintf (dump_file, "\n");
861 }
6de9cd9a 862
750628d8
DN
863 if (new_val.lattice_val == VARYING)
864 break;
865 }
866 }
6de9cd9a
DN
867
868 if (dump_file && (dump_flags & TDF_DETAILS))
750628d8
DN
869 {
870 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
871 fprintf (dump_file, "\n\n");
872 }
873
106dec71 874 /* Make the transition to the new value. */
750628d8
DN
875 if (set_lattice_value (PHI_RESULT (phi), new_val))
876 {
877 if (new_val.lattice_val == VARYING)
878 return SSA_PROP_VARYING;
879 else
880 return SSA_PROP_INTERESTING;
881 }
882 else
883 return SSA_PROP_NOT_INTERESTING;
6de9cd9a
DN
884}
885
886
750628d8
DN
887/* CCP specific front-end to the non-destructive constant folding
888 routines.
6de9cd9a
DN
889
890 Attempt to simplify the RHS of STMT knowing that one or more
891 operands are constants.
892
893 If simplification is possible, return the simplified RHS,
894 otherwise return the original RHS. */
895
static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
      return get_value (rhs)->value;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* If the RHS is a memory load, see if the VUSEs associated with
	 it are a valid constant for that memory load.  */
      prop_value_t *val = get_value_loaded_by (stmt, const_val);
      if (val && val->mem_ref)
	{
	  if (operand_equal_p (val->mem_ref, rhs, 0))
	    return val->value;

	  /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
	     complex type with a known constant value, return it.  */
	  if ((TREE_CODE (rhs) == REALPART_EXPR
	       || TREE_CODE (rhs) == IMAGPART_EXPR)
	      && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
	    return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
	}
      return NULL_TREE;
    }

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0)->value;
	}

      /* Conversions are no-ops if the types are useless-convertible;
	 just forward the (possibly constant) operand.  */
      if ((code == NOP_EXPR || code == CONVERT_EXPR)
	  && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0)))
	return op0;
      return fold_unary (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
	   || kind == tcc_comparison
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->value;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->value;
	}

      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL
	   && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
    {
      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
	{
	  tree *orig, var;
	  size_t i = 0;
	  ssa_op_iter iter;
	  use_operand_p var_p;

	  /* Preserve the original values of every operand.  */
	  orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    orig[i++] = var;

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL, const_val);
	  retval = fold_call_expr (rhs, false);

	  /* Restore operands to their original form so the IL is left
	     unmodified if folding fails.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
	    SET_USE (var_p, orig[i++]);
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
1023
1024
ae3df618
SB
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  /* Only a read-only, local-binding array has a usable
	     compile-time initializer.  */
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  Only single-byte integer
	 elements within the string's bounds can be extracted.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return cval;
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return cval;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Resolve the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
1168
6de9cd9a
DN
/* Evaluate statement STMT.  Return a lattice value (CONSTANT with the
   folded value, UNDEFINED, or VARYING) describing STMT's result.  */

static prop_value_t
evaluate_stmt (tree stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  /* Overflow warnings emitted while folding are deferred until we know
     whether folding actually produced a constant.  */
  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
     aggregates, extract the referenced constant.  Otherwise the
     statement is likely to have an UNDEFINED value, and there will be
     nothing to do.  Note that fold_const_aggregate_ref returns
     NULL_TREE if the first case does not match.  */
  else if (!simplified)
    simplified = fold_const_aggregate_ref (get_rhs (stmt));

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
1224
1225
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  prop_value_t val;
  tree lhs, rhs;
  enum ssa_prop_result retval;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      prop_value_t *nval = get_value (rhs);
      val = *nval;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* Same as above, but the RHS is not a gimple register and yet
	 has a known VUSE.  If STMT is loading from the same memory
	 location that created the SSA_NAMEs for the virtual operands,
	 we can propagate the value on the RHS.  */
      prop_value_t *nval = get_value_loaded_by (stmt, const_val);

      if (nval
	  && nval->mem_ref
	  && operand_equal_p (nval->mem_ref, rhs, 0))
	val = *nval;
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
	&& val.lattice_val == CONSTANT)
      {
	tree w = fold_unary (VIEW_CONVERT_EXPR,
			     TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
			     val.value);

	orig_lhs = TREE_OPERAND (orig_lhs, 0);
	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    /* Folding the converted constant failed; the result is
	       unknown.  */
	    val.lattice_val = VARYING;
	    val.value = NULL;
	  }
      }

    if (val.lattice_val == CONSTANT
	&& TREE_CODE (orig_lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
	tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
				 orig_lhs);

	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	    val.mem_ref = NULL_TREE;
	  }
      }
  }

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
1369
6de9cd9a 1370
750628d8
DN
1371/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1372 if it can determine which edge will be taken. Otherwise, return
1373 SSA_PROP_VARYING. */
1374
1375static enum ssa_prop_result
1376visit_cond_stmt (tree stmt, edge *taken_edge_p)
6de9cd9a 1377{
0bca51f0 1378 prop_value_t val;
750628d8
DN
1379 basic_block block;
1380
1381 block = bb_for_stmt (stmt);
1382 val = evaluate_stmt (stmt);
1383
1384 /* Find which edge out of the conditional block will be taken and add it
1385 to the worklist. If no single edge can be determined statically,
1386 return SSA_PROP_VARYING to feed all the outgoing edges to the
1387 propagation engine. */
0bca51f0 1388 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
750628d8
DN
1389 if (*taken_edge_p)
1390 return SSA_PROP_INTERESTING;
1391 else
1392 return SSA_PROP_VARYING;
6de9cd9a
DN
1393}
1394
6de9cd9a 1395
750628d8
DN
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_generic_stmt (dump_file, stmt, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
    {
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
6de9cd9a 1448
6de9cd9a 1449
0bca51f0 1450/* Main entry point for SSA Conditional Constant Propagation. */
750628d8 1451
3253eafb 1452static unsigned int
0bca51f0 1453execute_ssa_ccp (bool store_ccp)
750628d8 1454{
0bca51f0 1455 do_store_ccp = store_ccp;
750628d8
DN
1456 ccp_initialize ();
1457 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
3253eafb 1458 if (ccp_finalize ())
706ca88e 1459 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
3253eafb
JH
1460 else
1461 return 0;
6de9cd9a
DN
1462}
1463
173b818d 1464
/* Pass callback: run regular CCP (no store propagation).  */

static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}
1470
1471
/* Gate for the CCP pass: enabled by -ftree-ccp.  */

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
1477
6de9cd9a 1478
750628d8
DN
/* Pass descriptor for regular CCP.  */

struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
  0					/* letter */
};
6de9cd9a 1496
6de9cd9a 1497
/* Pass callback for store-CCP.  */

static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}
1504
static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}
1513
1514
/* Pass descriptor for store-CCP.  */

struct tree_opt_pass pass_store_ccp =
{
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
  0					/* letter */
};
1532
750628d8
DN
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }

  return wide_val;
}
1589
750628d8 1590
6de9cd9a
DN
1591/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1592 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
9cf737f8 1593 is the desired result type. */
6de9cd9a
DN
1594
1595static tree
1596maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
1597{
891fc5e9 1598 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
44de5aeb 1599 tree array_type, elt_type, elt_size;
3097760b 1600 tree domain_type;
44de5aeb
RK
1601
1602 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1603 measured in units of the size of elements type) from that ARRAY_REF).
1604 We can't do anything if either is variable.
1605
1606 The case we handle here is *(&A[N]+O). */
1607 if (TREE_CODE (base) == ARRAY_REF)
1608 {
1609 tree low_bound = array_ref_low_bound (base);
1610
1611 elt_offset = TREE_OPERAND (base, 1);
1612 if (TREE_CODE (low_bound) != INTEGER_CST
1613 || TREE_CODE (elt_offset) != INTEGER_CST)
1614 return NULL_TREE;
1615
1616 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1617 base = TREE_OPERAND (base, 0);
1618 }
6de9cd9a
DN
1619
1620 /* Ignore stupid user tricks of indexing non-array variables. */
1621 array_type = TREE_TYPE (base);
1622 if (TREE_CODE (array_type) != ARRAY_TYPE)
1623 return NULL_TREE;
1624 elt_type = TREE_TYPE (array_type);
f4088621 1625 if (!useless_type_conversion_p (orig_type, elt_type))
6de9cd9a 1626 return NULL_TREE;
891fc5e9
RG
1627
1628 /* Use signed size type for intermediate computation on the index. */
1629 idx_type = signed_type_for (size_type_node);
1630
44de5aeb
RK
1631 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1632 element type (so we can use the alignment if it's not constant).
1633 Otherwise, compute the offset as an index by using a division. If the
1634 division isn't exact, then don't do anything. */
6de9cd9a 1635 elt_size = TYPE_SIZE_UNIT (elt_type);
fe9821b8
JH
1636 if (!elt_size)
1637 return NULL;
44de5aeb
RK
1638 if (integer_zerop (offset))
1639 {
1640 if (TREE_CODE (elt_size) != INTEGER_CST)
1641 elt_size = size_int (TYPE_ALIGN (elt_type));
6de9cd9a 1642
891fc5e9 1643 idx = build_int_cst (idx_type, 0);
44de5aeb
RK
1644 }
1645 else
1646 {
1647 unsigned HOST_WIDE_INT lquo, lrem;
1648 HOST_WIDE_INT hquo, hrem;
891fc5e9 1649 double_int soffset;
44de5aeb 1650
891fc5e9
RG
1651 /* The final array offset should be signed, so we need
1652 to sign-extend the (possibly pointer) offset here
1653 and use signed division. */
1654 soffset = double_int_sext (tree_to_double_int (offset),
1655 TYPE_PRECISION (TREE_TYPE (offset)));
44de5aeb 1656 if (TREE_CODE (elt_size) != INTEGER_CST
891fc5e9
RG
1657 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1658 soffset.low, soffset.high,
44de5aeb
RK
1659 TREE_INT_CST_LOW (elt_size),
1660 TREE_INT_CST_HIGH (elt_size),
1661 &lquo, &hquo, &lrem, &hrem)
1662 || lrem || hrem)
1663 return NULL_TREE;
6de9cd9a 1664
891fc5e9 1665 idx = build_int_cst_wide (idx_type, lquo, hquo);
44de5aeb
RK
1666 }
1667
1668 /* Assume the low bound is zero. If there is a domain type, get the
1669 low bound, if any, convert the index into that type, and add the
1670 low bound. */
891fc5e9 1671 min_idx = build_int_cst (idx_type, 0);
3097760b
JH
1672 domain_type = TYPE_DOMAIN (array_type);
1673 if (domain_type)
6de9cd9a 1674 {
3097760b 1675 idx_type = domain_type;
891fc5e9
RG
1676 if (TYPE_MIN_VALUE (idx_type))
1677 min_idx = TYPE_MIN_VALUE (idx_type);
44de5aeb 1678 else
891fc5e9 1679 min_idx = fold_convert (idx_type, min_idx);
44de5aeb
RK
1680
1681 if (TREE_CODE (min_idx) != INTEGER_CST)
1682 return NULL_TREE;
1683
891fc5e9 1684 elt_offset = fold_convert (idx_type, elt_offset);
6de9cd9a
DN
1685 }
1686
44de5aeb
RK
1687 if (!integer_zerop (min_idx))
1688 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1689 if (!integer_zerop (elt_offset))
1690 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1691
891fc5e9
RG
1692 /* Make sure to possibly truncate late after offsetting. */
1693 idx = fold_convert (idx_type, idx);
1694
3097760b
JH
1695 /* We don't want to construct access past array bounds. For example
1696 char *(c[4]);
1697
1698 c[3][2]; should not be simplified into (*c)[14] or tree-vrp will give false
1699 warning. */
1700 if (domain_type && TYPE_MAX_VALUE (domain_type)
1701 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1702 {
1703 tree up_bound = TYPE_MAX_VALUE (domain_type);
1704
1705 if (tree_int_cst_lt (up_bound, idx)
1706 /* Accesses after the end of arrays of size 0 (gcc
1707 extension) and 1 are likely intentional ("struct
1708 hack"). */
1709 && compare_tree_int (up_bound, 1) > 0)
1710 return NULL_TREE;
1711 }
1712
04d86531 1713 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
6de9cd9a
DN
1714}
1715
750628d8 1716
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type);
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
1836
fe9821b8
JH
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset

	 NOTE(review): the "&& 0" below keeps this branch permanently
	 disabled.  It appears to be deliberately switched off rather
	 than a typo -- confirm against revision history before either
	 enabling or deleting it.  */
      if (handled_component_p (base) && 0)
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1)));
	  if (size == maxsize)
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type);
    }
  return ret;
}
750628d8 1900
6de9cd9a
DN
1901/* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1902 Return the simplified expression, or NULL if nothing could be done. */
1903
1904static tree
1905maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1906{
1907 tree t;
cbf8d355 1908 bool volatile_p = TREE_THIS_VOLATILE (expr);
6de9cd9a
DN
1909
1910 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1911 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1912 are sometimes added. */
1913 base = fold (base);
ff7c8597 1914 STRIP_TYPE_NOPS (base);
6de9cd9a
DN
1915 TREE_OPERAND (expr, 0) = base;
1916
1917 /* One possibility is that the address reduces to a string constant. */
1918 t = fold_read_from_constant_string (expr);
1919 if (t)
1920 return t;
1921
5be014d5
AP
1922 /* Add in any offset from a POINTER_PLUS_EXPR. */
1923 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
6de9cd9a
DN
1924 {
1925 tree offset2;
1926
1927 offset2 = TREE_OPERAND (base, 1);
1928 if (TREE_CODE (offset2) != INTEGER_CST)
1929 return NULL_TREE;
1930 base = TREE_OPERAND (base, 0);
1931
5be014d5
AP
1932 offset = fold_convert (sizetype,
1933 int_const_binop (PLUS_EXPR, offset, offset2, 1));
6de9cd9a
DN
1934 }
1935
1936 if (TREE_CODE (base) == ADDR_EXPR)
1937 {
fe9821b8
JH
1938 tree base_addr = base;
1939
6de9cd9a
DN
1940 /* Strip the ADDR_EXPR. */
1941 base = TREE_OPERAND (base, 0);
1942
0534fa56
RH
1943 /* Fold away CONST_DECL to its value, if the type is scalar. */
1944 if (TREE_CODE (base) == CONST_DECL
688e936d 1945 && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
0534fa56
RH
1946 return DECL_INITIAL (base);
1947
6de9cd9a 1948 /* Try folding *(&B+O) to B.X. */
fe9821b8
JH
1949 t = maybe_fold_offset_to_reference (base_addr, offset,
1950 TREE_TYPE (expr));
6de9cd9a 1951 if (t)
cbf8d355
RG
1952 {
1953 TREE_THIS_VOLATILE (t) = volatile_p;
1954 return t;
1955 }
6de9cd9a
DN
1956 }
1957 else
1958 {
1959 /* We can get here for out-of-range string constant accesses,
1960 such as "_"[3]. Bail out of the entire substitution search
1961 and arrange for the entire statement to be replaced by a
aabcd309 1962 call to __builtin_trap. In all likelihood this will all be
6de9cd9a
DN
1963 constant-folded away, but in the meantime we can't leave with
1964 something that get_expr_operands can't understand. */
1965
1966 t = base;
1967 STRIP_NOPS (t);
1968 if (TREE_CODE (t) == ADDR_EXPR
1969 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
1970 {
1971 /* FIXME: Except that this causes problems elsewhere with dead
0e61db61 1972 code not being deleted, and we die in the rtl expanders
6de9cd9a
DN
1973 because we failed to remove some ssa_name. In the meantime,
1974 just return zero. */
1975 /* FIXME2: This condition should be signaled by
1976 fold_read_from_constant_string directly, rather than
1977 re-checking for it here. */
1978 return integer_zero_node;
1979 }
1980
1981 /* Try folding *(B+O) to B->X. Still an improvement. */
1982 if (POINTER_TYPE_P (TREE_TYPE (base)))
1983 {
fe9821b8
JH
1984 t = maybe_fold_offset_to_reference (base, offset,
1985 TREE_TYPE (expr));
6de9cd9a
DN
1986 if (t)
1987 return t;
1988 }
1989 }
1990
1991 /* Otherwise we had an offset that we could not simplify. */
1992 return NULL_TREE;
1993}
1994
750628d8 1995
5be014d5 1996/* A subroutine of fold_stmt_r. EXPR is a POINTER_PLUS_EXPR.
6de9cd9a
DN
1997
1998 A quaint feature extant in our address arithmetic is that there
1999 can be hidden type changes here. The type of the result need
2000 not be the same as the type of the input pointer.
2001
2002 What we're after here is an expression of the form
2003 (T *)(&array + const)
2004 where the cast doesn't actually exist, but is implicit in the
5be014d5 2005 type of the POINTER_PLUS_EXPR. We'd like to turn this into
6de9cd9a
DN
2006 &array[x]
2007 which may be able to propagate further. */
2008
2009static tree
2010maybe_fold_stmt_addition (tree expr)
2011{
2012 tree op0 = TREE_OPERAND (expr, 0);
2013 tree op1 = TREE_OPERAND (expr, 1);
2014 tree ptr_type = TREE_TYPE (expr);
2015 tree ptd_type;
2016 tree t;
6de9cd9a 2017
5be014d5
AP
2018 gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR);
2019
6de9cd9a
DN
2020 /* It had better be a constant. */
2021 if (TREE_CODE (op1) != INTEGER_CST)
2022 return NULL_TREE;
2023 /* The first operand should be an ADDR_EXPR. */
2024 if (TREE_CODE (op0) != ADDR_EXPR)
2025 return NULL_TREE;
2026 op0 = TREE_OPERAND (op0, 0);
2027
2028 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2029 the offset into it. */
2030 while (TREE_CODE (op0) == ARRAY_REF)
2031 {
2032 tree array_obj = TREE_OPERAND (op0, 0);
2033 tree array_idx = TREE_OPERAND (op0, 1);
2034 tree elt_type = TREE_TYPE (op0);
2035 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2036 tree min_idx;
2037
2038 if (TREE_CODE (array_idx) != INTEGER_CST)
2039 break;
2040 if (TREE_CODE (elt_size) != INTEGER_CST)
2041 break;
2042
2043 /* Un-bias the index by the min index of the array type. */
2044 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2045 if (min_idx)
2046 {
2047 min_idx = TYPE_MIN_VALUE (min_idx);
2048 if (min_idx)
2049 {
44de5aeb
RK
2050 if (TREE_CODE (min_idx) != INTEGER_CST)
2051 break;
2052
b6f65e3c 2053 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
6de9cd9a
DN
2054 if (!integer_zerop (min_idx))
2055 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2056 min_idx, 0);
2057 }
2058 }
2059
2060 /* Convert the index to a byte offset. */
b6f65e3c 2061 array_idx = fold_convert (sizetype, array_idx);
6de9cd9a
DN
2062 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2063
2064 /* Update the operands for the next round, or for folding. */
5be014d5 2065 op1 = int_const_binop (PLUS_EXPR,
6de9cd9a 2066 array_idx, op1, 0);
6de9cd9a
DN
2067 op0 = array_obj;
2068 }
2069
6de9cd9a
DN
2070 ptd_type = TREE_TYPE (ptr_type);
2071
2072 /* At which point we can try some of the same things as for indirects. */
2073 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
2074 if (!t)
2075 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2076 ptd_type, false);
2077 if (t)
2078 t = build1 (ADDR_EXPR, ptr_type, t);
2079
2080 return t;
2081}
2082
622f91ba
JL
2083/* For passing state through walk_tree into fold_stmt_r and its
2084 children. */
2085
2086struct fold_stmt_r_data
2087{
6ac01510
ILT
2088 tree stmt;
2089 bool *changed_p;
2090 bool *inside_addr_expr_p;
622f91ba
JL
2091};
2092
6de9cd9a
DN
2093/* Subroutine of fold_stmt called via walk_tree. We perform several
2094 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2095
2096static tree
2097fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2098{
858904db 2099 struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
622f91ba
JL
2100 bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2101 bool *changed_p = fold_stmt_r_data->changed_p;
6de9cd9a 2102 tree expr = *expr_p, t;
6aaa4013 2103 bool volatile_p = TREE_THIS_VOLATILE (expr);
6de9cd9a
DN
2104
2105 /* ??? It'd be nice if walk_tree had a pre-order option. */
2106 switch (TREE_CODE (expr))
2107 {
2108 case INDIRECT_REF:
2109 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2110 if (t)
2111 return t;
2112 *walk_subtrees = 0;
2113
2114 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2115 integer_zero_node);
2116 break;
2117
fe9821b8
JH
2118 case NOP_EXPR:
2119 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2120 if (t)
2121 return t;
2122 *walk_subtrees = 0;
2123
2124 if (POINTER_TYPE_P (TREE_TYPE (expr))
2125 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2126 && (t = maybe_fold_offset_to_reference
2127 (TREE_OPERAND (expr, 0),
2128 integer_zero_node,
2129 TREE_TYPE (TREE_TYPE (expr)))))
fa2050d2
RG
2130 {
2131 tree ptr_type = build_pointer_type (TREE_TYPE (t));
2132 if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
2133 return NULL_TREE;
2134 t = build_fold_addr_expr_with_type (t, ptr_type);
2135 }
fe9821b8
JH
2136 break;
2137
622f91ba 2138 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
6de9cd9a
DN
2139 We'd only want to bother decomposing an existing ARRAY_REF if
2140 the base array is found to have another offset contained within.
2141 Otherwise we'd be wasting time. */
622f91ba
JL
2142 case ARRAY_REF:
2143 /* If we are not processing expressions found within an
2144 ADDR_EXPR, then we can fold constant array references. */
2145 if (!*inside_addr_expr_p)
2146 t = fold_read_from_constant_string (expr);
2147 else
2148 t = NULL;
2149 break;
6de9cd9a
DN
2150
2151 case ADDR_EXPR:
622f91ba 2152 *inside_addr_expr_p = true;
6de9cd9a 2153 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
622f91ba 2154 *inside_addr_expr_p = false;
6de9cd9a
DN
2155 if (t)
2156 return t;
2157 *walk_subtrees = 0;
2158
2159 /* Set TREE_INVARIANT properly so that the value is properly
2160 considered constant, and so gets propagated as expected. */
2161 if (*changed_p)
127203ac 2162 recompute_tree_invariant_for_addr_expr (expr);
6de9cd9a
DN
2163 return NULL_TREE;
2164
5be014d5 2165 case POINTER_PLUS_EXPR:
6de9cd9a
DN
2166 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2167 if (t)
2168 return t;
2169 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2170 if (t)
2171 return t;
2172 *walk_subtrees = 0;
2173
2174 t = maybe_fold_stmt_addition (expr);
2175 break;
2176
2177 case COMPONENT_REF:
2178 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2179 if (t)
2180 return t;
2181 *walk_subtrees = 0;
2182
fa27426e
RH
2183 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2184 We've already checked that the records are compatible, so we should
2185 come up with a set of compatible fields. */
2186 {
2187 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2188 tree expr_field = TREE_OPERAND (expr, 1);
2189
2190 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2191 {
2192 expr_field = find_compatible_field (expr_record, expr_field);
2193 TREE_OPERAND (expr, 1) = expr_field;
2194 }
2195 }
6de9cd9a
DN
2196 break;
2197
ac182688
ZD
2198 case TARGET_MEM_REF:
2199 t = maybe_fold_tmr (expr);
2200 break;
2201
f393e7f5
RG
2202 case COND_EXPR:
2203 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2204 {
2205 tree op0 = TREE_OPERAND (expr, 0);
6ac01510
ILT
2206 tree tem;
2207 bool set;
2208
2209 fold_defer_overflow_warnings ();
2210 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2211 TREE_OPERAND (op0, 0),
2212 TREE_OPERAND (op0, 1));
2213 set = tem && set_rhs (expr_p, tem);
2214 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2215 if (set)
5cdc4a26
RS
2216 {
2217 t = *expr_p;
2218 break;
2219 }
f393e7f5 2220 }
5cdc4a26 2221 return NULL_TREE;
f393e7f5 2222
6de9cd9a
DN
2223 default:
2224 return NULL_TREE;
2225 }
2226
2227 if (t)
2228 {
6aaa4013
RG
2229 /* Preserve volatileness of the original expression. */
2230 TREE_THIS_VOLATILE (t) = volatile_p;
6de9cd9a
DN
2231 *expr_p = t;
2232 *changed_p = true;
2233 }
2234
2235 return NULL_TREE;
2236}
2237
6de9cd9a 2238
10a0d495
JJ
2239/* Return the string length, maximum string length or maximum value of
2240 ARG in LENGTH.
2241 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2242 is not NULL and, for TYPE == 0, its value is not equal to the length
2243 we determine or if we are unable to determine the length or value,
2244 return false. VISITED is a bitmap of visited variables.
2245 TYPE is 0 if string length should be returned, 1 for maximum string
2246 length and 2 for maximum value ARG can have. */
6de9cd9a 2247
06a9b53f 2248static bool
10a0d495 2249get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
6de9cd9a 2250{
750628d8
DN
2251 tree var, def_stmt, val;
2252
2253 if (TREE_CODE (arg) != SSA_NAME)
06a9b53f 2254 {
f255541f
RC
2255 if (TREE_CODE (arg) == COND_EXPR)
2256 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2257 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2258
10a0d495
JJ
2259 if (type == 2)
2260 {
2261 val = arg;
2262 if (TREE_CODE (val) != INTEGER_CST
2263 || tree_int_cst_sgn (val) < 0)
2264 return false;
2265 }
2266 else
2267 val = c_strlen (arg, 1);
750628d8 2268 if (!val)
06a9b53f 2269 return false;
cd709752 2270
10a0d495
JJ
2271 if (*length)
2272 {
2273 if (type > 0)
2274 {
2275 if (TREE_CODE (*length) != INTEGER_CST
2276 || TREE_CODE (val) != INTEGER_CST)
2277 return false;
2278
2279 if (tree_int_cst_lt (*length, val))
2280 *length = val;
2281 return true;
2282 }
2283 else if (simple_cst_equal (val, *length) != 1)
2284 return false;
2285 }
6de9cd9a 2286
750628d8
DN
2287 *length = val;
2288 return true;
6de9cd9a 2289 }
06a9b53f 2290
750628d8
DN
2291 /* If we were already here, break the infinite cycle. */
2292 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2293 return true;
2294 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2295
2296 var = arg;
2297 def_stmt = SSA_NAME_DEF_STMT (var);
6de9cd9a 2298
750628d8
DN
2299 switch (TREE_CODE (def_stmt))
2300 {
07beea0d 2301 case GIMPLE_MODIFY_STMT:
750628d8 2302 {
10a0d495
JJ
2303 tree rhs;
2304
750628d8
DN
2305 /* The RHS of the statement defining VAR must either have a
2306 constant length or come from another SSA_NAME with a constant
2307 length. */
07beea0d 2308 rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
750628d8 2309 STRIP_NOPS (rhs);
10a0d495 2310 return get_maxval_strlen (rhs, length, visited, type);
750628d8 2311 }
6de9cd9a 2312
750628d8
DN
2313 case PHI_NODE:
2314 {
2315 /* All the arguments of the PHI node must have the same constant
2316 length. */
2317 int i;
6de9cd9a 2318
750628d8
DN
2319 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2320 {
2321 tree arg = PHI_ARG_DEF (def_stmt, i);
6de9cd9a 2322
750628d8
DN
2323 /* If this PHI has itself as an argument, we cannot
2324 determine the string length of this argument. However,
2325 if we can find a constant string length for the other
2326 PHI args then we can still be sure that this is a
2327 constant string length. So be optimistic and just
2328 continue with the next argument. */
2329 if (arg == PHI_RESULT (def_stmt))
2330 continue;
6de9cd9a 2331
10a0d495 2332 if (!get_maxval_strlen (arg, length, visited, type))
750628d8
DN
2333 return false;
2334 }
6de9cd9a 2335
750628d8 2336 return true;
173b818d 2337 }
6de9cd9a 2338
750628d8
DN
2339 default:
2340 break;
6de9cd9a
DN
2341 }
2342
750628d8
DN
2343
2344 return false;
6de9cd9a
DN
2345}
2346
2347
2348/* Fold builtin call FN in statement STMT. If it cannot be folded into a
2349 constant, return NULL_TREE. Otherwise, return its constant value. */
2350
2351static tree
2352ccp_fold_builtin (tree stmt, tree fn)
2353{
10a0d495 2354 tree result, val[3];
5039610b 2355 tree callee, a;
10a0d495 2356 int arg_mask, i, type;
a32e70c3
RS
2357 bitmap visited;
2358 bool ignore;
5039610b
SL
2359 call_expr_arg_iterator iter;
2360 int nargs;
6de9cd9a 2361
07beea0d 2362 ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;
6de9cd9a
DN
2363
2364 /* First try the generic builtin folder. If that succeeds, return the
2365 result directly. */
5039610b 2366 result = fold_call_expr (fn, ignore);
6de9cd9a 2367 if (result)
10a0d495
JJ
2368 {
2369 if (ignore)
2370 STRIP_NOPS (result);
2371 return result;
2372 }
a32e70c3
RS
2373
2374 /* Ignore MD builtins. */
5039610b 2375 callee = get_callee_fndecl (fn);
a32e70c3
RS
2376 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2377 return NULL_TREE;
6de9cd9a
DN
2378
2379 /* If the builtin could not be folded, and it has no argument list,
2380 we're done. */
5039610b
SL
2381 nargs = call_expr_nargs (fn);
2382 if (nargs == 0)
6de9cd9a
DN
2383 return NULL_TREE;
2384
2385 /* Limit the work only for builtins we know how to simplify. */
2386 switch (DECL_FUNCTION_CODE (callee))
2387 {
2388 case BUILT_IN_STRLEN:
2389 case BUILT_IN_FPUTS:
2390 case BUILT_IN_FPUTS_UNLOCKED:
10a0d495
JJ
2391 arg_mask = 1;
2392 type = 0;
6de9cd9a
DN
2393 break;
2394 case BUILT_IN_STRCPY:
2395 case BUILT_IN_STRNCPY:
10a0d495
JJ
2396 arg_mask = 2;
2397 type = 0;
2398 break;
2399 case BUILT_IN_MEMCPY_CHK:
2400 case BUILT_IN_MEMPCPY_CHK:
2401 case BUILT_IN_MEMMOVE_CHK:
2402 case BUILT_IN_MEMSET_CHK:
2403 case BUILT_IN_STRNCPY_CHK:
2404 arg_mask = 4;
2405 type = 2;
2406 break;
2407 case BUILT_IN_STRCPY_CHK:
2408 case BUILT_IN_STPCPY_CHK:
2409 arg_mask = 2;
2410 type = 1;
2411 break;
2412 case BUILT_IN_SNPRINTF_CHK:
2413 case BUILT_IN_VSNPRINTF_CHK:
2414 arg_mask = 2;
2415 type = 2;
6de9cd9a
DN
2416 break;
2417 default:
2418 return NULL_TREE;
2419 }
2420
2421 /* Try to use the dataflow information gathered by the CCP process. */
8bdbfff5 2422 visited = BITMAP_ALLOC (NULL);
6de9cd9a 2423
10a0d495 2424 memset (val, 0, sizeof (val));
5039610b
SL
2425 init_call_expr_arg_iterator (fn, &iter);
2426 for (i = 0; arg_mask; i++, arg_mask >>= 1)
2427 {
2428 a = next_call_expr_arg (&iter);
2429 if (arg_mask & 1)
2430 {
2431 bitmap_clear (visited);
2432 if (!get_maxval_strlen (a, &val[i], visited, type))
2433 val[i] = NULL_TREE;
2434 }
2435 }
6de9cd9a 2436
8bdbfff5 2437 BITMAP_FREE (visited);
6de9cd9a 2438
a32e70c3 2439 result = NULL_TREE;
6de9cd9a
DN
2440 switch (DECL_FUNCTION_CODE (callee))
2441 {
2442 case BUILT_IN_STRLEN:
10a0d495 2443 if (val[0])
6de9cd9a 2444 {
c22940cd 2445 tree new_val = fold_convert (TREE_TYPE (fn), val[0]);
6de9cd9a
DN
2446
2447 /* If the result is not a valid gimple value, or not a cast
2448 of a valid gimple value, then we can not use the result. */
c22940cd
TN
2449 if (is_gimple_val (new_val)
2450 || (is_gimple_cast (new_val)
2451 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2452 return new_val;
6de9cd9a 2453 }
a32e70c3
RS
2454 break;
2455
6de9cd9a 2456 case BUILT_IN_STRCPY:
5039610b
SL
2457 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2458 result = fold_builtin_strcpy (callee,
2459 CALL_EXPR_ARG (fn, 0),
2460 CALL_EXPR_ARG (fn, 1),
2461 val[1]);
a32e70c3
RS
2462 break;
2463
6de9cd9a 2464 case BUILT_IN_STRNCPY:
5039610b
SL
2465 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2466 result = fold_builtin_strncpy (callee,
2467 CALL_EXPR_ARG (fn, 0),
2468 CALL_EXPR_ARG (fn, 1),
2469 CALL_EXPR_ARG (fn, 2),
2470 val[1]);
a32e70c3
RS
2471 break;
2472
6de9cd9a 2473 case BUILT_IN_FPUTS:
5039610b
SL
2474 result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
2475 CALL_EXPR_ARG (fn, 1),
07beea0d 2476 TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
10a0d495 2477 val[0]);
a32e70c3
RS
2478 break;
2479
6de9cd9a 2480 case BUILT_IN_FPUTS_UNLOCKED:
5039610b
SL
2481 result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
2482 CALL_EXPR_ARG (fn, 1),
07beea0d 2483 TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
10a0d495
JJ
2484 val[0]);
2485 break;
2486
2487 case BUILT_IN_MEMCPY_CHK:
2488 case BUILT_IN_MEMPCPY_CHK:
2489 case BUILT_IN_MEMMOVE_CHK:
2490 case BUILT_IN_MEMSET_CHK:
2491 if (val[2] && is_gimple_val (val[2]))
5039610b
SL
2492 result = fold_builtin_memory_chk (callee,
2493 CALL_EXPR_ARG (fn, 0),
2494 CALL_EXPR_ARG (fn, 1),
2495 CALL_EXPR_ARG (fn, 2),
2496 CALL_EXPR_ARG (fn, 3),
2497 val[2], ignore,
10a0d495
JJ
2498 DECL_FUNCTION_CODE (callee));
2499 break;
2500
2501 case BUILT_IN_STRCPY_CHK:
2502 case BUILT_IN_STPCPY_CHK:
2503 if (val[1] && is_gimple_val (val[1]))
5039610b
SL
2504 result = fold_builtin_stxcpy_chk (callee,
2505 CALL_EXPR_ARG (fn, 0),
2506 CALL_EXPR_ARG (fn, 1),
2507 CALL_EXPR_ARG (fn, 2),
2508 val[1], ignore,
10a0d495
JJ
2509 DECL_FUNCTION_CODE (callee));
2510 break;
2511
2512 case BUILT_IN_STRNCPY_CHK:
2513 if (val[2] && is_gimple_val (val[2]))
5039610b
SL
2514 result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
2515 CALL_EXPR_ARG (fn, 1),
2516 CALL_EXPR_ARG (fn, 2),
2517 CALL_EXPR_ARG (fn, 3),
2518 val[2]);
10a0d495
JJ
2519 break;
2520
2521 case BUILT_IN_SNPRINTF_CHK:
2522 case BUILT_IN_VSNPRINTF_CHK:
2523 if (val[1] && is_gimple_val (val[1]))
5039610b 2524 result = fold_builtin_snprintf_chk (fn, val[1],
10a0d495 2525 DECL_FUNCTION_CODE (callee));
a32e70c3 2526 break;
6de9cd9a
DN
2527
2528 default:
1e128c5f 2529 gcc_unreachable ();
6de9cd9a
DN
2530 }
2531
a32e70c3 2532 if (result && ignore)
9675412f 2533 result = fold_ignored_result (result);
a32e70c3 2534 return result;
6de9cd9a
DN
2535}
2536
2537
206048bd 2538/* Fold the statement pointed to by STMT_P. In some cases, this function may
750628d8
DN
2539 replace the whole statement with a new one. Returns true iff folding
2540 makes any changes. */
6de9cd9a 2541
750628d8
DN
2542bool
2543fold_stmt (tree *stmt_p)
6de9cd9a 2544{
750628d8 2545 tree rhs, result, stmt;
622f91ba 2546 struct fold_stmt_r_data fold_stmt_r_data;
750628d8 2547 bool changed = false;
622f91ba
JL
2548 bool inside_addr_expr = false;
2549
6ac01510
ILT
2550 stmt = *stmt_p;
2551
2552 fold_stmt_r_data.stmt = stmt;
622f91ba
JL
2553 fold_stmt_r_data.changed_p = &changed;
2554 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
6de9cd9a 2555
750628d8
DN
2556 /* If we replaced constants and the statement makes pointer dereferences,
2557 then we may need to fold instances of *&VAR into VAR, etc. */
622f91ba 2558 if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
750628d8 2559 {
5039610b 2560 *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
6de9cd9a
DN
2561 return true;
2562 }
2563
750628d8
DN
2564 rhs = get_rhs (stmt);
2565 if (!rhs)
2566 return changed;
2567 result = NULL_TREE;
6de9cd9a 2568
750628d8 2569 if (TREE_CODE (rhs) == CALL_EXPR)
6de9cd9a 2570 {
750628d8 2571 tree callee;
6de9cd9a 2572
750628d8
DN
2573 /* Check for builtins that CCP can handle using information not
2574 available in the generic fold routines. */
2575 callee = get_callee_fndecl (rhs);
2576 if (callee && DECL_BUILT_IN (callee))
2577 result = ccp_fold_builtin (stmt, rhs);
1809ff6b
AP
2578 else
2579 {
2580 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2581 here are when we've propagated the address of a decl into the
2582 object slot. */
2583 /* ??? Should perhaps do this in fold proper. However, doing it
2584 there requires that we create a new CALL_EXPR, and that requires
2585 copying EH region info to the new node. Easier to just do it
2586 here where we can just smash the call operand. Also
2587 CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
5039610b
SL
2588 copied, fold_call_expr does not have not information. */
2589 callee = CALL_EXPR_FN (rhs);
1809ff6b
AP
2590 if (TREE_CODE (callee) == OBJ_TYPE_REF
2591 && lang_hooks.fold_obj_type_ref
2592 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2593 && DECL_P (TREE_OPERAND
2594 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2595 {
2596 tree t;
2597
2598 /* ??? Caution: Broken ADDR_EXPR semantics means that
2599 looking at the type of the operand of the addr_expr
2600 can yield an array type. See silly exception in
2601 check_pointer_types_r. */
2602
2603 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2604 t = lang_hooks.fold_obj_type_ref (callee, t);
2605 if (t)
2606 {
5039610b 2607 CALL_EXPR_FN (rhs) = t;
1809ff6b
AP
2608 changed = true;
2609 }
2610 }
2611 }
6de9cd9a 2612 }
f255541f
RC
2613 else if (TREE_CODE (rhs) == COND_EXPR)
2614 {
2615 tree temp = fold (COND_EXPR_COND (rhs));
2616 if (temp != COND_EXPR_COND (rhs))
2617 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2618 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2619 }
6de9cd9a 2620
750628d8
DN
2621 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
2622 if (result == NULL_TREE)
2623 result = fold (rhs);
6de9cd9a 2624
750628d8
DN
2625 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
2626 may have been added by fold, and "useless" type conversions that might
2627 now be apparent due to propagation. */
2628 STRIP_USELESS_TYPE_CONVERSION (result);
2629
2630 if (result != rhs)
2631 changed |= set_rhs (stmt_p, result);
2632
2633 return changed;
6de9cd9a
DN
2634}
2635
38965eb2
ZD
2636/* Perform the minimal folding on statement STMT. Only operations like
2637 *&x created by constant propagation are handled. The statement cannot
2638 be replaced with a new one. */
2639
2640bool
2641fold_stmt_inplace (tree stmt)
2642{
2643 tree old_stmt = stmt, rhs, new_rhs;
622f91ba 2644 struct fold_stmt_r_data fold_stmt_r_data;
38965eb2 2645 bool changed = false;
622f91ba
JL
2646 bool inside_addr_expr = false;
2647
6ac01510 2648 fold_stmt_r_data.stmt = stmt;
622f91ba
JL
2649 fold_stmt_r_data.changed_p = &changed;
2650 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
38965eb2 2651
622f91ba 2652 walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL);
38965eb2
ZD
2653 gcc_assert (stmt == old_stmt);
2654
2655 rhs = get_rhs (stmt);
2656 if (!rhs || rhs == stmt)
2657 return changed;
2658
2659 new_rhs = fold (rhs);
43e364dd 2660 STRIP_USELESS_TYPE_CONVERSION (new_rhs);
38965eb2
ZD
2661 if (new_rhs == rhs)
2662 return changed;
2663
2664 changed |= set_rhs (&stmt, new_rhs);
2665 gcc_assert (stmt == old_stmt);
2666
2667 return changed;
2668}
6de9cd9a 2669\f
cb8e078d
JJ
2670/* Try to optimize out __builtin_stack_restore. Optimize it out
2671 if there is another __builtin_stack_restore in the same basic
2672 block and no calls or ASM_EXPRs are in between, or if this block's
2673 only outgoing edge is to EXIT_BLOCK and there are no calls or
2674 ASM_EXPRs after this __builtin_stack_restore. */
2675
2676static tree
2677optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
2678{
2679 tree stack_save, stmt, callee;
2680
2681 if (TREE_CODE (call) != CALL_EXPR
2682 || call_expr_nargs (call) != 1
2683 || TREE_CODE (CALL_EXPR_ARG (call, 0)) != SSA_NAME
2684 || !POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
2685 return NULL_TREE;
2686
2687 for (bsi_next (&i); !bsi_end_p (i); bsi_next (&i))
2688 {
2689 tree call;
2690
2691 stmt = bsi_stmt (i);
2692 if (TREE_CODE (stmt) == ASM_EXPR)
2693 return NULL_TREE;
2694 call = get_call_expr_in (stmt);
2695 if (call == NULL)
2696 continue;
2697
2698 callee = get_callee_fndecl (call);
2699 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2700 return NULL_TREE;
2701
2702 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2703 break;
2704 }
2705
2706 if (bsi_end_p (i)
2707 && (! single_succ_p (bb)
2708 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
2709 return NULL_TREE;
2710
2711 stack_save = SSA_NAME_DEF_STMT (CALL_EXPR_ARG (call, 0));
2712 if (TREE_CODE (stack_save) != GIMPLE_MODIFY_STMT
2713 || GIMPLE_STMT_OPERAND (stack_save, 0) != CALL_EXPR_ARG (call, 0)
2714 || TREE_CODE (GIMPLE_STMT_OPERAND (stack_save, 1)) != CALL_EXPR
2715 || tree_could_throw_p (stack_save)
2716 || !has_single_use (CALL_EXPR_ARG (call, 0)))
2717 return NULL_TREE;
2718
2719 callee = get_callee_fndecl (GIMPLE_STMT_OPERAND (stack_save, 1));
2720 if (!callee
2721 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2722 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
2723 || call_expr_nargs (GIMPLE_STMT_OPERAND (stack_save, 1)) != 0)
2724 return NULL_TREE;
2725
2726 stmt = stack_save;
2727 push_stmt_changes (&stmt);
2728 if (!set_rhs (&stmt,
2729 build_int_cst (TREE_TYPE (CALL_EXPR_ARG (call, 0)), 0)))
2730 {
2731 discard_stmt_changes (&stmt);
2732 return NULL_TREE;
2733 }
2734 gcc_assert (stmt == stack_save);
2735 pop_stmt_changes (&stmt);
2736
2737 return integer_zero_node;
2738}
2739\f
d7bd8aeb
JJ
2740/* If va_list type is a simple pointer and nothing special is needed,
2741 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2742 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
2743 pointer assignment. */
2744
2745static tree
2746optimize_stdarg_builtin (tree call)
2747{
2748 tree callee, lhs, rhs;
2749 bool va_list_simple_ptr;
2750
2751 if (TREE_CODE (call) != CALL_EXPR)
2752 return NULL_TREE;
2753
2754 va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
2755 && (TREE_TYPE (va_list_type_node) == void_type_node
2756 || TREE_TYPE (va_list_type_node) == char_type_node);
2757
2758 callee = get_callee_fndecl (call);
2759 switch (DECL_FUNCTION_CODE (callee))
2760 {
2761 case BUILT_IN_VA_START:
2762 if (!va_list_simple_ptr
2763 || targetm.expand_builtin_va_start != NULL
2764 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
2765 return NULL_TREE;
2766
2767 if (call_expr_nargs (call) != 2)
2768 return NULL_TREE;
2769
2770 lhs = CALL_EXPR_ARG (call, 0);
2771 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2772 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2773 != TYPE_MAIN_VARIANT (va_list_type_node))
2774 return NULL_TREE;
2775
2776 lhs = build_fold_indirect_ref (lhs);
2777 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
2778 1, integer_zero_node);
2779 rhs = fold_convert (TREE_TYPE (lhs), rhs);
2780 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2781
2782 case BUILT_IN_VA_COPY:
2783 if (!va_list_simple_ptr)
2784 return NULL_TREE;
2785
2786 if (call_expr_nargs (call) != 2)
2787 return NULL_TREE;
2788
2789 lhs = CALL_EXPR_ARG (call, 0);
2790 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2791 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2792 != TYPE_MAIN_VARIANT (va_list_type_node))
2793 return NULL_TREE;
2794
2795 lhs = build_fold_indirect_ref (lhs);
2796 rhs = CALL_EXPR_ARG (call, 1);
2797 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2798 != TYPE_MAIN_VARIANT (va_list_type_node))
2799 return NULL_TREE;
2800
2801 rhs = fold_convert (TREE_TYPE (lhs), rhs);
2802 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2803
2804 case BUILT_IN_VA_END:
2805 return integer_zero_node;
2806
2807 default:
2808 gcc_unreachable ();
2809 }
2810}
2811\f
b28b1600
JJ
2812/* Convert EXPR into a GIMPLE value suitable for substitution on the
2813 RHS of an assignment. Insert the necessary statements before
2e929cf3
JH
2814 iterator *SI_P.
2815 When IGNORE is set, don't worry about the return value. */
b28b1600
JJ
2816
2817static tree
2e929cf3 2818convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore)
b28b1600
JJ
2819{
2820 tree_stmt_iterator ti;
2821 tree stmt = bsi_stmt (*si_p);
2822 tree tmp, stmts = NULL;
2823
2824 push_gimplify_context ();
2e929cf3
JH
2825 if (ignore)
2826 {
2827 tmp = build_empty_stmt ();
2828 gimplify_and_add (expr, &stmts);
2829 }
2830 else
2831 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
b28b1600
JJ
2832 pop_gimplify_context (NULL);
2833
f47c96aa
AM
2834 if (EXPR_HAS_LOCATION (stmt))
2835 annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));
2836
b28b1600
JJ
2837 /* The replacement can expose previously unreferenced variables. */
2838 for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
2839 {
f47c96aa 2840 tree new_stmt = tsi_stmt (ti);
b28b1600 2841 find_new_referenced_vars (tsi_stmt_ptr (ti));
f47c96aa 2842 bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
cfaab3a9 2843 mark_symbols_for_renaming (new_stmt);
f47c96aa 2844 bsi_next (si_p);
b28b1600
JJ
2845 }
2846
b28b1600
JJ
2847 return tmp;
2848}
2849
2850
6de9cd9a
DN
2851/* A simple pass that attempts to fold all builtin functions. This pass
2852 is run after we've propagated as many constants as we can. */
2853
c2924966 2854static unsigned int
6de9cd9a
DN
2855execute_fold_all_builtins (void)
2856{
a7d6ba24 2857 bool cfg_changed = false;
6de9cd9a 2858 basic_block bb;
7b0e48fb
DB
2859 unsigned int todoflags = 0;
2860
6de9cd9a
DN
2861 FOR_EACH_BB (bb)
2862 {
2863 block_stmt_iterator i;
10a0d495 2864 for (i = bsi_start (bb); !bsi_end_p (i); )
6de9cd9a
DN
2865 {
2866 tree *stmtp = bsi_stmt_ptr (i);
af47810a 2867 tree old_stmt = *stmtp;
6de9cd9a
DN
2868 tree call = get_rhs (*stmtp);
2869 tree callee, result;
10a0d495 2870 enum built_in_function fcode;
6de9cd9a
DN
2871
2872 if (!call || TREE_CODE (call) != CALL_EXPR)
10a0d495
JJ
2873 {
2874 bsi_next (&i);
2875 continue;
2876 }
6de9cd9a
DN
2877 callee = get_callee_fndecl (call);
2878 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
10a0d495
JJ
2879 {
2880 bsi_next (&i);
2881 continue;
2882 }
2883 fcode = DECL_FUNCTION_CODE (callee);
6de9cd9a
DN
2884
2885 result = ccp_fold_builtin (*stmtp, call);
2886 if (!result)
2887 switch (DECL_FUNCTION_CODE (callee))
2888 {
2889 case BUILT_IN_CONSTANT_P:
2890 /* Resolve __builtin_constant_p. If it hasn't been
2891 folded to integer_one_node by now, it's fairly
2892 certain that the value simply isn't constant. */
2893 result = integer_zero_node;
2894 break;
2895
cb8e078d
JJ
2896 case BUILT_IN_STACK_RESTORE:
2897 result = optimize_stack_restore (bb, *stmtp, i);
d7bd8aeb
JJ
2898 if (result)
2899 break;
2900 bsi_next (&i);
2901 continue;
2902
2903 case BUILT_IN_VA_START:
2904 case BUILT_IN_VA_END:
2905 case BUILT_IN_VA_COPY:
2906 /* These shouldn't be folded before pass_stdarg. */
2907 result = optimize_stdarg_builtin (*stmtp);
cb8e078d
JJ
2908 if (result)
2909 break;
2910 /* FALLTHRU */
2911
6de9cd9a 2912 default:
10a0d495 2913 bsi_next (&i);
6de9cd9a
DN
2914 continue;
2915 }
2916
2917 if (dump_file && (dump_flags & TDF_DETAILS))
2918 {
2919 fprintf (dump_file, "Simplified\n ");
2920 print_generic_stmt (dump_file, *stmtp, dump_flags);
2921 }
2922
cfaab3a9
DN
2923 push_stmt_changes (stmtp);
2924
b28b1600
JJ
2925 if (!set_rhs (stmtp, result))
2926 {
2e929cf3
JH
2927 result = convert_to_gimple_builtin (&i, result,
2928 TREE_CODE (old_stmt)
07beea0d 2929 != GIMPLE_MODIFY_STMT);
e16acfcd
NS
2930 if (result)
2931 {
2932 bool ok = set_rhs (stmtp, result);
e16acfcd 2933 gcc_assert (ok);
7b0e48fb 2934 todoflags |= TODO_rebuild_alias;
e16acfcd 2935 }
b28b1600 2936 }
cfaab3a9
DN
2937
2938 pop_stmt_changes (stmtp);
2939
af47810a 2940 if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
a7d6ba24
JJ
2941 && tree_purge_dead_eh_edges (bb))
2942 cfg_changed = true;
6de9cd9a
DN
2943
2944 if (dump_file && (dump_flags & TDF_DETAILS))
2945 {
2946 fprintf (dump_file, "to\n ");
2947 print_generic_stmt (dump_file, *stmtp, dump_flags);
2948 fprintf (dump_file, "\n");
2949 }
10a0d495
JJ
2950
2951 /* Retry the same statement if it changed into another
2952 builtin, there might be new opportunities now. */
2953 call = get_rhs (*stmtp);
2954 if (!call || TREE_CODE (call) != CALL_EXPR)
2955 {
2956 bsi_next (&i);
2957 continue;
2958 }
2959 callee = get_callee_fndecl (call);
2960 if (!callee
2961 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2962 || DECL_FUNCTION_CODE (callee) == fcode)
2963 bsi_next (&i);
6de9cd9a
DN
2964 }
2965 }
7b0e48fb 2966
a7d6ba24 2967 /* Delete unreachable blocks. */
7b0e48fb
DB
2968 if (cfg_changed)
2969 todoflags |= TODO_cleanup_cfg;
2970
2971 return todoflags;
6de9cd9a
DN
2972}
2973
750628d8 2974
6de9cd9a
DN
2975struct tree_opt_pass pass_fold_builtins =
2976{
2977 "fab", /* name */
2978 NULL, /* gate */
2979 execute_fold_all_builtins, /* execute */
2980 NULL, /* sub */
2981 NULL, /* next */
2982 0, /* static_pass_number */
2983 0, /* tv_id */
7faade0f 2984 PROP_cfg | PROP_ssa, /* properties_required */
6de9cd9a
DN
2985 0, /* properties_provided */
2986 0, /* properties_destroyed */
2987 0, /* todo_flags_start */
b28b1600
JJ
2988 TODO_dump_func
2989 | TODO_verify_ssa
0bca51f0 2990 | TODO_update_ssa, /* todo_flags_finish */
9f8628ba 2991 0 /* letter */
6de9cd9a 2992};