4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
cfaf579d 2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
000657b5 3 Free Software Foundation, Inc.
4ee9c684 4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it
10under the terms of the GNU General Public License as published by the
8c4c00c1 11Free Software Foundation; either version 3, or (at your option) any
4ee9c684 12later version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT
15ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
4ee9c684 22
88dbf20f 23/* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
30
bfa30570 31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
88dbf20f 38
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
43
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
46
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
50
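   As a purely illustrative sketch (x_1 and y_2 are hypothetical SSA
   names), consider

	x_1 = 4;
	y_2 = x_1 + 1;

   Every name starts out UNINITIALIZED.  Visiting the first statement
   moves x_1 to CONSTANT 4; visiting the second folds x_1 + 1 and
   moves y_2 to CONSTANT 5.  Nothing here forces VARYING, so both
   names are replaced by their constants at substitution time.
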
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
 58	 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
61
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
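
	 For instance (a hypothetical fragment), given

	   if (0)
	     goto bb_2;
	   else
	     goto bb_3;

	 the predicate folds to false, so only the edge into bb_3 is
	 marked executable; the simulator never visits the statements of
	 bb_2, and PHI arguments flowing in from bb_2 are ignored.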
66
67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
73
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
76
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
82
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
 85	 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
87
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
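
	 As a hypothetical example of this shortcut, in

	   if (PRED)
	     x_2 = 5;
	   x_3 = PHI (x_1, x_2)

	 where x_1 has no defining statement, x_1 is UNDEFINED and the
	 meet of UNDEFINED with CONSTANT 5 optimistically yields
	 CONSTANT 5 for x_3.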
95
96
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
101
102
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
105
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
112
113 For instance, consider the following code fragment:
114
115 struct A a;
116 const int B = 42;
117
118 void foo (int i)
119 {
120 if (i > 10)
121 a.a = 42;
122 else
123 {
124 a.b = 21;
125 a.a = a.b + 21;
126 }
127
128 if (a.a != B)
129 never_executed ();
130 }
131
132 We should be able to deduce that the predicate 'a.a != B' is always
 133	 false.  To achieve this, we associate constant values with the SSA
4fb5e5ca 134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
88dbf20f 138
4fb5e5ca 139 # a_5 = VDEF <a_4>
88dbf20f 140 a.a = 2;
141
142 # VUSE <a_5>
143 x_3 = a.b;
144
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
148
bfa30570 149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
88dbf20f 151
152 int A;
153
154 foo (int i)
155 {
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
159
160 # VUSE <A_5>
161 A.0_6 = A;
162
163 return A.0_6;
164 }
165
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
bfa30570 168 would erroneously optimize the above into 'return 3;'.
88dbf20f 169
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
4ee9c684 176
177 References:
178
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
181
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
184
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
187
188#include "config.h"
189#include "system.h"
190#include "coretypes.h"
191#include "tm.h"
4ee9c684 192#include "tree.h"
41511585 193#include "flags.h"
4ee9c684 194#include "rtl.h"
195#include "tm_p.h"
41511585 196#include "ggc.h"
4ee9c684 197#include "basic-block.h"
41511585 198#include "output.h"
41511585 199#include "expr.h"
200#include "function.h"
4ee9c684 201#include "diagnostic.h"
41511585 202#include "timevar.h"
4ee9c684 203#include "tree-dump.h"
41511585 204#include "tree-flow.h"
4ee9c684 205#include "tree-pass.h"
41511585 206#include "tree-ssa-propagate.h"
5a4b7e1e 207#include "value-prof.h"
41511585 208#include "langhooks.h"
8782adcf 209#include "target.h"
add6ee5e 210#include "toplev.h"
43fb76c1 211#include "dbgcnt.h"
4ee9c684 212
213
214/* Possible lattice values. */
215typedef enum
216{
bfa30570 217 UNINITIALIZED,
4ee9c684 218 UNDEFINED,
219 CONSTANT,
220 VARYING
88dbf20f 221} ccp_lattice_t;
4ee9c684 222
88dbf20f 223/* Array of propagated constant values. After propagation,
224 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
225 the constant is held in an SSA name representing a memory store
4fb5e5ca 226 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
227 memory reference used to store (i.e., the LHS of the assignment
228 doing the store). */
20140406 229static prop_value_t *const_val;
4ee9c684 230
88dbf20f 231/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 232
233static void
88dbf20f 234dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
01406fc0 235{
41511585 236 switch (val.lattice_val)
01406fc0 237 {
88dbf20f 238 case UNINITIALIZED:
239 fprintf (outf, "%sUNINITIALIZED", prefix);
240 break;
41511585 241 case UNDEFINED:
242 fprintf (outf, "%sUNDEFINED", prefix);
243 break;
244 case VARYING:
245 fprintf (outf, "%sVARYING", prefix);
246 break;
41511585 247 case CONSTANT:
248 fprintf (outf, "%sCONSTANT ", prefix);
88dbf20f 249 print_generic_expr (outf, val.value, dump_flags);
41511585 250 break;
251 default:
8c0963c4 252 gcc_unreachable ();
41511585 253 }
01406fc0 254}
4ee9c684 255
4ee9c684 256
88dbf20f 257/* Print lattice value VAL to stderr. */
258
259void debug_lattice_value (prop_value_t val);
260
261void
262debug_lattice_value (prop_value_t val)
263{
264 dump_lattice_value (stderr, "", val);
265 fprintf (stderr, "\n");
266}
4ee9c684 267
4ee9c684 268
75a70cf9 269
aecfc21d 270/* If SYM is a constant variable with known value, return the value.
271 NULL_TREE is returned otherwise. */
272
e004838d 273tree
aecfc21d 274get_symbol_constant_value (tree sym)
275{
276 if (TREE_STATIC (sym)
dd277d48 277 && TREE_READONLY (sym))
aecfc21d 278 {
279 tree val = DECL_INITIAL (sym);
590d65aa 280 if (val)
281 {
282 STRIP_USELESS_TYPE_CONVERSION (val);
283 if (is_gimple_min_invariant (val))
284 return val;
285 }
6e6e51e5 286 /* Variables declared 'const' without an initializer
f0b5f617 287 have zero as the initializer if they may not be
e004838d 288 overridden at link or run time. */
6e6e51e5 289 if (!val
807bf718 290 && !DECL_EXTERNAL (sym)
e004838d 291 && targetm.binds_local_p (sym)
6e6e51e5 292 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
293 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
807bf718 294 return fold_convert (TREE_TYPE (sym), integer_zero_node);
aecfc21d 295 }
296
297 return NULL_TREE;
298}
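
/* As an illustration only (the declarations below are hypothetical):
   for a TU-local

     static const int forty_two = 42;

   get_symbol_constant_value returns 42 via the DECL_INITIAL path
   above, while an uninitialized TU-local 'static const int zero;'
   folds to 0 through the zero-initializer path, because it binds
   locally and cannot be overridden at link or run time.  */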
d03bd588 299
88dbf20f 300/* Compute a default value for variable VAR and store it in the
301 CONST_VAL array. The following rules are used to get default
302 values:
01406fc0 303
88dbf20f 304 1- Global and static variables that are declared constant are
305 considered CONSTANT.
306
307 2- Any other value is considered UNDEFINED. This is useful when
41511585 308 considering PHI nodes. PHI arguments that are undefined do not
309 change the constant value of the PHI node, which allows for more
88dbf20f 310 constants to be propagated.
4ee9c684 311
8883e700 312 3- Variables defined by statements other than assignments and PHI
88dbf20f 313 nodes are considered VARYING.
4ee9c684 314
8883e700 315 4- Initial values of variables that are not GIMPLE registers are
bfa30570 316 considered VARYING. */
4ee9c684 317
88dbf20f 318static prop_value_t
319get_default_value (tree var)
320{
321 tree sym = SSA_NAME_VAR (var);
61207d43 322 prop_value_t val = { UNINITIALIZED, NULL_TREE };
8edeb88b 323 gimple stmt;
324
325 stmt = SSA_NAME_DEF_STMT (var);
326
327 if (gimple_nop_p (stmt))
4ee9c684 328 {
8edeb88b 329 /* Variables defined by an empty statement are those used
330 before being initialized. If VAR is a local variable, we
331 can assume initially that it is UNDEFINED, otherwise we must
332 consider it VARYING. */
333 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
334 val.lattice_val = UNDEFINED;
335 else
336 val.lattice_val = VARYING;
4ee9c684 337 }
8edeb88b 338 else if (is_gimple_assign (stmt)
339 /* Value-returning GIMPLE_CALL statements assign to
340 a variable, and are treated similarly to GIMPLE_ASSIGN. */
341 || (is_gimple_call (stmt)
342 && gimple_call_lhs (stmt) != NULL_TREE)
343 || gimple_code (stmt) == GIMPLE_PHI)
41511585 344 {
8edeb88b 345 tree cst;
346 if (gimple_assign_single_p (stmt)
347 && DECL_P (gimple_assign_rhs1 (stmt))
348 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
88dbf20f 349 {
8edeb88b 350 val.lattice_val = CONSTANT;
351 val.value = cst;
88dbf20f 352 }
353 else
8edeb88b 354 /* Any other variable defined by an assignment or a PHI node
355 is considered UNDEFINED. */
356 val.lattice_val = UNDEFINED;
357 }
358 else
359 {
360 /* Otherwise, VAR will never take on a constant value. */
361 val.lattice_val = VARYING;
41511585 362 }
4ee9c684 363
41511585 364 return val;
365}
4ee9c684 366
4ee9c684 367
bfa30570 368/* Get the constant value associated with variable VAR. */
4ee9c684 369
bfa30570 370static inline prop_value_t *
371get_value (tree var)
88dbf20f 372{
e004838d 373 prop_value_t *val;
bfa30570 374
e004838d 375 if (const_val == NULL)
376 return NULL;
377
378 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 379 if (val->lattice_val == UNINITIALIZED)
4ee9c684 380 *val = get_default_value (var);
381
382 return val;
383}
384
bfa30570 385/* Sets the value associated with VAR to VARYING. */
386
387static inline void
388set_value_varying (tree var)
389{
390 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
391
392 val->lattice_val = VARYING;
393 val->value = NULL_TREE;
bfa30570 394}
4ee9c684 395
b31eb493 396/* For float types, modify the value of VAL to make ccp work correctly
397 for non-standard values (-0, NaN):
398
399 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
400 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
401 This is to fix the following problem (see PR 29921): Suppose we have
402
403 x = 0.0 * y
404
405 and we set value of y to NaN. This causes value of x to be set to NaN.
406 When we later determine that y is in fact VARYING, fold uses the fact
407 that HONOR_NANS is false, and we try to change the value of x to 0,
408 causing an ICE. With HONOR_NANS being false, the real appearance of
409 NaN would cause undefined behavior, though, so claiming that y (and x)
410 are UNDEFINED initially is correct. */
411
412static void
413canonicalize_float_value (prop_value_t *val)
414{
415 enum machine_mode mode;
416 tree type;
417 REAL_VALUE_TYPE d;
418
419 if (val->lattice_val != CONSTANT
420 || TREE_CODE (val->value) != REAL_CST)
421 return;
422
423 d = TREE_REAL_CST (val->value);
424 type = TREE_TYPE (val->value);
425 mode = TYPE_MODE (type);
426
427 if (!HONOR_SIGNED_ZEROS (mode)
428 && REAL_VALUE_MINUS_ZERO (d))
429 {
430 val->value = build_real (type, dconst0);
431 return;
432 }
433
434 if (!HONOR_NANS (mode)
435 && REAL_VALUE_ISNAN (d))
436 {
437 val->lattice_val = UNDEFINED;
438 val->value = NULL;
b31eb493 439 return;
440 }
441}
442
88dbf20f 443/* Set the value for variable VAR to NEW_VAL. Return true if the new
444 value is different from VAR's previous value. */
4ee9c684 445
41511585 446static bool
88dbf20f 447set_lattice_value (tree var, prop_value_t new_val)
4ee9c684 448{
bfa30570 449 prop_value_t *old_val = get_value (var);
88dbf20f 450
b31eb493 451 canonicalize_float_value (&new_val);
452
88dbf20f 453 /* Lattice transitions must always be monotonically increasing in
bfa30570 454 value. If *OLD_VAL and NEW_VAL are the same, return false to
455 inform the caller that this was a non-transition. */
456
aecfc21d 457 gcc_assert (old_val->lattice_val < new_val.lattice_val
88dbf20f 458 || (old_val->lattice_val == new_val.lattice_val
aecfc21d 459 && ((!old_val->value && !new_val.value)
61207d43 460 || operand_equal_p (old_val->value, new_val.value, 0))));
88dbf20f 461
462 if (old_val->lattice_val != new_val.lattice_val)
4ee9c684 463 {
41511585 464 if (dump_file && (dump_flags & TDF_DETAILS))
465 {
88dbf20f 466 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
bfa30570 467 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
41511585 468 }
469
88dbf20f 470 *old_val = new_val;
471
bfa30570 472 gcc_assert (new_val.lattice_val != UNDEFINED);
473 return true;
4ee9c684 474 }
41511585 475
476 return false;
4ee9c684 477}
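
/* As a worked example of the monotonicity requirement above, the
   sequence UNDEFINED -> CONSTANT 4 -> VARYING is a legal series of
   set_lattice_value transitions for one SSA name, whereas
   CONSTANT 4 -> CONSTANT 5 or VARYING -> CONSTANT would trip the
   assertion.  */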
478
479
88dbf20f 480/* Return the likely CCP lattice value for STMT.
4ee9c684 481
41511585 482 If STMT has no operands, then return CONSTANT.
4ee9c684 483
d61b9af3 484   Else if undefinedness of operands of STMT causes its value to be
485 undefined, then return UNDEFINED.
4ee9c684 486
41511585 487 Else if any operands of STMT are constants, then return CONSTANT.
4ee9c684 488
41511585 489 Else return VARYING. */
4ee9c684 490
88dbf20f 491static ccp_lattice_t
75a70cf9 492likely_value (gimple stmt)
41511585 493{
d61b9af3 494 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
41511585 495 tree use;
496 ssa_op_iter iter;
8edeb88b 497 unsigned i;
4ee9c684 498
590c3166 499 enum gimple_code code = gimple_code (stmt);
75a70cf9 500
501 /* This function appears to be called only for assignments, calls,
502 conditionals, and switches, due to the logic in visit_stmt. */
503 gcc_assert (code == GIMPLE_ASSIGN
504 || code == GIMPLE_CALL
505 || code == GIMPLE_COND
506 || code == GIMPLE_SWITCH);
88dbf20f 507
508 /* If the statement has volatile operands, it won't fold to a
509 constant value. */
75a70cf9 510 if (gimple_has_volatile_ops (stmt))
88dbf20f 511 return VARYING;
512
75a70cf9 513 /* Arrive here for more complex cases. */
bfa30570 514 has_constant_operand = false;
d61b9af3 515 has_undefined_operand = false;
516 all_undefined_operands = true;
8edeb88b 517 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
41511585 518 {
bfa30570 519 prop_value_t *val = get_value (use);
41511585 520
bfa30570 521 if (val->lattice_val == UNDEFINED)
d61b9af3 522 has_undefined_operand = true;
523 else
524 all_undefined_operands = false;
88dbf20f 525
41511585 526 if (val->lattice_val == CONSTANT)
bfa30570 527 has_constant_operand = true;
4ee9c684 528 }
41511585 529
dd277d48 530  /* There may be constants in regular rhs operands.  For calls we
 531     have to ignore the lhs, fndecl and static chain; for other
 532     statements we ignore only the lhs.  */
533 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
8edeb88b 534 i < gimple_num_ops (stmt); ++i)
535 {
536 tree op = gimple_op (stmt, i);
537 if (!op || TREE_CODE (op) == SSA_NAME)
538 continue;
539 if (is_gimple_min_invariant (op))
540 has_constant_operand = true;
541 }
542
d61b9af3 543  /* If the operation combines operands like COMPLEX_EXPR, make sure
 544     not to mark the result UNDEFINED if only one part of the result is
545 undefined. */
75a70cf9 546 if (has_undefined_operand && all_undefined_operands)
d61b9af3 547 return UNDEFINED;
75a70cf9 548 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
d61b9af3 549 {
75a70cf9 550 switch (gimple_assign_rhs_code (stmt))
d61b9af3 551 {
552 /* Unary operators are handled with all_undefined_operands. */
553 case PLUS_EXPR:
554 case MINUS_EXPR:
d61b9af3 555 case POINTER_PLUS_EXPR:
d61b9af3 556 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
557 Not bitwise operators, one VARYING operand may specify the
558 result completely. Not logical operators for the same reason.
05a936a0 559 Not COMPLEX_EXPR as one VARYING operand makes the result partly
560 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
561 the undefined operand may be promoted. */
d61b9af3 562 return UNDEFINED;
563
564 default:
565 ;
566 }
567 }
 568  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
 569     fall back to VARYING even if there were CONSTANT operands.  */
570 if (has_undefined_operand)
571 return VARYING;
572
8edeb88b 573 /* We do not consider virtual operands here -- load from read-only
574 memory may have only VARYING virtual operands, but still be
575 constant. */
bfa30570 576 if (has_constant_operand
8edeb88b 577 || gimple_references_memory_p (stmt))
88dbf20f 578 return CONSTANT;
579
bfa30570 580 return VARYING;
4ee9c684 581}
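
/* A hypothetical example of the logic above: for z_3 = x_1 + y_2 with
   x_1 CONSTANT 3 and y_2 still UNDEFINED, only some operands are
   undefined, and since PLUS_EXPR propagates undefinedness the result
   is UNDEFINED; if y_2 later becomes VARYING, the next visit returns
   CONSTANT because of the constant operand x_1, and evaluate_stmt
   then decides whether folding really yields a constant.  */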
582
bfa30570 583/* Returns true if STMT cannot be constant. */
584
585static bool
75a70cf9 586surely_varying_stmt_p (gimple stmt)
bfa30570 587{
588 /* If the statement has operands that we cannot handle, it cannot be
589 constant. */
75a70cf9 590 if (gimple_has_volatile_ops (stmt))
bfa30570 591 return true;
592
f257af64 593  /* If it is a call that does not return a value, or a direct
 594     call to something other than a builtin, it is varying.  */
75a70cf9 595 if (is_gimple_call (stmt))
f257af64 596 {
597 tree fndecl;
598 if (!gimple_call_lhs (stmt)
599 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5768aeb3 600 && !DECL_BUILT_IN (fndecl)))
f257af64 601 return true;
602 }
bfa30570 603
8edeb88b 604 /* Any other store operation is not interesting. */
dd277d48 605 else if (gimple_vdef (stmt))
8edeb88b 606 return true;
607
bfa30570 608 /* Anything other than assignments and conditional jumps are not
609 interesting for CCP. */
75a70cf9 610 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 611 && gimple_code (stmt) != GIMPLE_COND
612 && gimple_code (stmt) != GIMPLE_SWITCH
613 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 614 return true;
615
616 return false;
617}
4ee9c684 618
41511585 619/* Initialize local data structures for CCP. */
4ee9c684 620
621static void
41511585 622ccp_initialize (void)
4ee9c684 623{
41511585 624 basic_block bb;
4ee9c684 625
43959b95 626 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
4ee9c684 627
41511585 628 /* Initialize simulation flags for PHI nodes and statements. */
629 FOR_EACH_BB (bb)
4ee9c684 630 {
75a70cf9 631 gimple_stmt_iterator i;
4ee9c684 632
75a70cf9 633 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
41511585 634 {
75a70cf9 635 gimple stmt = gsi_stmt (i);
bfa30570 636 bool is_varying = surely_varying_stmt_p (stmt);
4ee9c684 637
bfa30570 638 if (is_varying)
41511585 639 {
88dbf20f 640 tree def;
641 ssa_op_iter iter;
642
643 /* If the statement will not produce a constant, mark
644 all its outputs VARYING. */
645 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
8edeb88b 646 set_value_varying (def);
41511585 647 }
75a70cf9 648 prop_set_simulate_again (stmt, !is_varying);
41511585 649 }
4ee9c684 650 }
651
75a70cf9 652 /* Now process PHI nodes. We never clear the simulate_again flag on
653 phi nodes, since we do not know which edges are executable yet,
 654   except for phi nodes for virtual operands, which are never simulated.  */
41511585 655 FOR_EACH_BB (bb)
4ee9c684 656 {
75a70cf9 657 gimple_stmt_iterator i;
41511585 658
75a70cf9 659 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
660 {
661 gimple phi = gsi_stmt (i);
662
61207d43 663 if (!is_gimple_reg (gimple_phi_result (phi)))
75a70cf9 664 prop_set_simulate_again (phi, false);
bfa30570 665 else
75a70cf9 666 prop_set_simulate_again (phi, true);
41511585 667 }
4ee9c684 668 }
41511585 669}
4ee9c684 670
43fb76c1 671/* Debug count support.  Reset the lattice values of SSA names to
 672   VARYING once the total number of SSA names analyzed goes
 673   beyond the specified debug count.  */
674
675static void
676do_dbg_cnt (void)
677{
678 unsigned i;
679 for (i = 0; i < num_ssa_names; i++)
680 {
681 if (!dbg_cnt (ccp))
682 {
683 const_val[i].lattice_val = VARYING;
684 const_val[i].value = NULL_TREE;
685 }
686 }
687}
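
/* Assuming the usual debug-counter syntax, -fdbg-cnt=ccp:N keeps the
   propagated values of roughly the first N SSA names and resets the
   remainder to VARYING, which helps bisect a miscompilation down to a
   single name.  */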
688
4ee9c684 689
88dbf20f 690/* Do final substitution of propagated values, clean up the flowgraph and
33a34f1e 691 free allocated storage.
4ee9c684 692
33a34f1e 693 Return TRUE when something was optimized. */
694
695static bool
88dbf20f 696ccp_finalize (void)
4ee9c684 697{
43fb76c1 698 bool something_changed;
699
700 do_dbg_cnt ();
88dbf20f 701 /* Perform substitutions based on the known constant values. */
43fb76c1 702 something_changed = substitute_and_fold (const_val, false);
4ee9c684 703
88dbf20f 704 free (const_val);
e004838d 705 const_val = NULL;
 33a34f1e 706  return something_changed;
4ee9c684 707}
708
709
88dbf20f 710/* Compute the meet operator between *VAL1 and *VAL2. Store the result
711 in VAL1.
712
713 any M UNDEFINED = any
88dbf20f 714 any M VARYING = VARYING
715 Ci M Cj = Ci if (i == j)
716 Ci M Cj = VARYING if (i != j)
bfa30570 717 */
4ee9c684 718
719static void
88dbf20f 720ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
4ee9c684 721{
88dbf20f 722 if (val1->lattice_val == UNDEFINED)
4ee9c684 723 {
88dbf20f 724 /* UNDEFINED M any = any */
725 *val1 = *val2;
41511585 726 }
88dbf20f 727 else if (val2->lattice_val == UNDEFINED)
92481a4d 728 {
88dbf20f 729 /* any M UNDEFINED = any
730 Nothing to do. VAL1 already contains the value we want. */
731 ;
92481a4d 732 }
88dbf20f 733 else if (val1->lattice_val == VARYING
734 || val2->lattice_val == VARYING)
41511585 735 {
88dbf20f 736 /* any M VARYING = VARYING. */
737 val1->lattice_val = VARYING;
738 val1->value = NULL_TREE;
41511585 739 }
88dbf20f 740 else if (val1->lattice_val == CONSTANT
741 && val2->lattice_val == CONSTANT
61207d43 742 && simple_cst_equal (val1->value, val2->value) == 1)
41511585 743 {
88dbf20f 744      /* Ci M Cj = Ci if (i == j)
 745	 Ci M Cj = VARYING if (i != j)
 746
 747	 VAL1 already holds Ci, so there is nothing more to do when
 748	 the two constants compare equal.  */
 749      val1->lattice_val = CONSTANT;
41511585 751 }
752 else
753 {
88dbf20f 754 /* Any other combination is VARYING. */
755 val1->lattice_val = VARYING;
756 val1->value = NULL_TREE;
41511585 757 }
4ee9c684 758}
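
/* Worked examples of the meet rules above (all values hypothetical):
   UNDEFINED M CONSTANT 7 = CONSTANT 7, CONSTANT 7 M CONSTANT 7 =
   CONSTANT 7, CONSTANT 7 M CONSTANT 8 = VARYING, and anything M
   VARYING = VARYING.  */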
759
760
41511585 761/* Loop through the PHI_NODE's parameters for BLOCK and compare their
762 lattice values to determine PHI_NODE's lattice value. The value of a
88dbf20f 763   PHI node is determined by calling ccp_lattice_meet with all the arguments
41511585 764 of the PHI node that are incoming via executable edges. */
4ee9c684 765
41511585 766static enum ssa_prop_result
75a70cf9 767ccp_visit_phi_node (gimple phi)
4ee9c684 768{
75a70cf9 769 unsigned i;
88dbf20f 770 prop_value_t *old_val, new_val;
4ee9c684 771
41511585 772 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 773 {
41511585 774 fprintf (dump_file, "\nVisiting PHI node: ");
75a70cf9 775 print_gimple_stmt (dump_file, phi, 0, dump_flags);
4ee9c684 776 }
4ee9c684 777
75a70cf9 778 old_val = get_value (gimple_phi_result (phi));
41511585 779 switch (old_val->lattice_val)
780 {
781 case VARYING:
88dbf20f 782 return SSA_PROP_VARYING;
4ee9c684 783
41511585 784 case CONSTANT:
785 new_val = *old_val;
786 break;
4ee9c684 787
41511585 788 case UNDEFINED:
41511585 789 new_val.lattice_val = UNDEFINED;
88dbf20f 790 new_val.value = NULL_TREE;
41511585 791 break;
4ee9c684 792
41511585 793 default:
8c0963c4 794 gcc_unreachable ();
41511585 795 }
4ee9c684 796
75a70cf9 797 for (i = 0; i < gimple_phi_num_args (phi); i++)
41511585 798 {
88dbf20f 799 /* Compute the meet operator over all the PHI arguments flowing
800 through executable edges. */
75a70cf9 801 edge e = gimple_phi_arg_edge (phi, i);
4ee9c684 802
41511585 803 if (dump_file && (dump_flags & TDF_DETAILS))
804 {
805 fprintf (dump_file,
806 "\n Argument #%d (%d -> %d %sexecutable)\n",
807 i, e->src->index, e->dest->index,
808 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
809 }
810
 811      /* If the incoming edge is executable, compute the meet operator for
812 the existing value of the PHI node and the current PHI argument. */
813 if (e->flags & EDGE_EXECUTABLE)
814 {
75a70cf9 815 tree arg = gimple_phi_arg (phi, i)->def;
88dbf20f 816 prop_value_t arg_val;
4ee9c684 817
88dbf20f 818 if (is_gimple_min_invariant (arg))
41511585 819 {
88dbf20f 820 arg_val.lattice_val = CONSTANT;
821 arg_val.value = arg;
41511585 822 }
823 else
bfa30570 824 arg_val = *(get_value (arg));
4ee9c684 825
88dbf20f 826 ccp_lattice_meet (&new_val, &arg_val);
4ee9c684 827
41511585 828 if (dump_file && (dump_flags & TDF_DETAILS))
829 {
830 fprintf (dump_file, "\t");
88dbf20f 831 print_generic_expr (dump_file, arg, dump_flags);
832 dump_lattice_value (dump_file, "\tValue: ", arg_val);
41511585 833 fprintf (dump_file, "\n");
834 }
4ee9c684 835
41511585 836 if (new_val.lattice_val == VARYING)
837 break;
838 }
839 }
4ee9c684 840
841 if (dump_file && (dump_flags & TDF_DETAILS))
41511585 842 {
843 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
844 fprintf (dump_file, "\n\n");
845 }
846
bfa30570 847 /* Make the transition to the new value. */
75a70cf9 848 if (set_lattice_value (gimple_phi_result (phi), new_val))
41511585 849 {
850 if (new_val.lattice_val == VARYING)
851 return SSA_PROP_VARYING;
852 else
853 return SSA_PROP_INTERESTING;
854 }
855 else
856 return SSA_PROP_NOT_INTERESTING;
4ee9c684 857}
858
fb8ed03f 859/* Return true if we may propagate the address expression ADDR into the
860 dereference DEREF and cancel them. */
861
862bool
863may_propagate_address_into_dereference (tree addr, tree deref)
864{
865 gcc_assert (INDIRECT_REF_P (deref)
866 && TREE_CODE (addr) == ADDR_EXPR);
867
0d051ece 868 /* Don't propagate if ADDR's operand has incomplete type. */
869 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
870 return false;
871
fb8ed03f 872 /* If the address is invariant then we do not need to preserve restrict
873 qualifications. But we do need to preserve volatile qualifiers until
874 we can annotate the folded dereference itself properly. */
875 if (is_gimple_min_invariant (addr)
876 && (!TREE_THIS_VOLATILE (deref)
877 || TYPE_VOLATILE (TREE_TYPE (addr))))
878 return useless_type_conversion_p (TREE_TYPE (deref),
879 TREE_TYPE (TREE_OPERAND (addr, 0)));
880
881 /* Else both the address substitution and the folding must result in
882 a valid useless type conversion sequence. */
883 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
884 TREE_TYPE (addr))
885 && useless_type_conversion_p (TREE_TYPE (deref),
886 TREE_TYPE (TREE_OPERAND (addr, 0))));
887}
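
/* For example (hypothetical trees): with 'int a;', ADDR = &a and
   DEREF = *&a, the dereference may be cancelled to plain 'a', since
   the invariant address points to a complete type and the types
   convert uselessly; propagation is refused when the pointed-to type
   is incomplete or the required conversions are not useless.  */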
4ee9c684 888
41511585 889/* CCP specific front-end to the non-destructive constant folding
890 routines.
4ee9c684 891
892 Attempt to simplify the RHS of STMT knowing that one or more
893 operands are constants.
894
895 If simplification is possible, return the simplified RHS,
75a70cf9 896 otherwise return the original RHS or NULL_TREE. */
4ee9c684 897
898static tree
75a70cf9 899ccp_fold (gimple stmt)
4ee9c684 900{
75a70cf9 901 switch (gimple_code (stmt))
88dbf20f 902 {
75a70cf9 903 case GIMPLE_ASSIGN:
904 {
905 enum tree_code subcode = gimple_assign_rhs_code (stmt);
906
907 switch (get_gimple_rhs_class (subcode))
908 {
909 case GIMPLE_SINGLE_RHS:
910 {
911 tree rhs = gimple_assign_rhs1 (stmt);
912 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
913
914 if (TREE_CODE (rhs) == SSA_NAME)
915 {
916 /* If the RHS is an SSA_NAME, return its known constant value,
917 if any. */
918 return get_value (rhs)->value;
919 }
920 /* Handle propagating invariant addresses into address operations.
921 The folding we do here matches that in tree-ssa-forwprop.c. */
922 else if (TREE_CODE (rhs) == ADDR_EXPR)
923 {
924 tree *base;
925 base = &TREE_OPERAND (rhs, 0);
926 while (handled_component_p (*base))
927 base = &TREE_OPERAND (*base, 0);
928 if (TREE_CODE (*base) == INDIRECT_REF
929 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
930 {
931 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
932 if (val->lattice_val == CONSTANT
933 && TREE_CODE (val->value) == ADDR_EXPR
fb8ed03f 934 && may_propagate_address_into_dereference
935 (val->value, *base))
75a70cf9 936 {
937 /* We need to return a new tree, not modify the IL
938 or share parts of it. So play some tricks to
939 avoid manually building it. */
940 tree ret, save = *base;
941 *base = TREE_OPERAND (val->value, 0);
942 ret = unshare_expr (rhs);
943 recompute_tree_invariant_for_addr_expr (ret);
944 *base = save;
945 return ret;
946 }
947 }
948 }
4ee9c684 949
75a70cf9 950 if (kind == tcc_reference)
3e4be816 951 {
952 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
953 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
954 {
955 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
956 if (val->lattice_val == CONSTANT)
957 return fold_unary (VIEW_CONVERT_EXPR,
958 TREE_TYPE (rhs), val->value);
959 }
8edeb88b 960 else if (TREE_CODE (rhs) == INDIRECT_REF
961 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
962 {
963 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
964 if (val->lattice_val == CONSTANT
965 && TREE_CODE (val->value) == ADDR_EXPR
966 && useless_type_conversion_p (TREE_TYPE (rhs),
967 TREE_TYPE (TREE_TYPE (val->value))))
968 rhs = TREE_OPERAND (val->value, 0);
969 }
3e4be816 970 return fold_const_aggregate_ref (rhs);
971 }
75a70cf9 972 else if (kind == tcc_declaration)
973 return get_symbol_constant_value (rhs);
974 return rhs;
975 }
976
977 case GIMPLE_UNARY_RHS:
978 {
979 /* Handle unary operators that can appear in GIMPLE form.
980 Note that we know the single operand must be a constant,
981 so this should almost always return a simplified RHS. */
982 tree lhs = gimple_assign_lhs (stmt);
983 tree op0 = gimple_assign_rhs1 (stmt);
984
985 /* Simplify the operand down to a constant. */
986 if (TREE_CODE (op0) == SSA_NAME)
987 {
988 prop_value_t *val = get_value (op0);
989 if (val->lattice_val == CONSTANT)
 990	      op0 = val->value;
991 }
992
993 /* Conversions are useless for CCP purposes if they are
994 value-preserving. Thus the restrictions that
995 useless_type_conversion_p places for pointer type conversions
996 do not apply here. Substitution later will only substitute to
997 allowed places. */
d9659041 998 if (CONVERT_EXPR_CODE_P (subcode)
5768aeb3 999 && POINTER_TYPE_P (TREE_TYPE (lhs))
1000 && POINTER_TYPE_P (TREE_TYPE (op0))
1001 /* Do not allow differences in volatile qualification
1002 as this might get us confused as to whether a
1003 propagation destination statement is volatile
1004 or not. See PR36988. */
1005 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1006 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1007 {
1008 tree tem;
1009 /* Still try to generate a constant of correct type. */
1010 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1011 TREE_TYPE (op0))
1012 && ((tem = maybe_fold_offset_to_address
1013 (op0, integer_zero_node, TREE_TYPE (lhs)))
1014 != NULL_TREE))
1015 return tem;
1016 return op0;
1017 }
75a70cf9 1018
cd30b839 1019 return fold_unary_ignore_overflow (subcode,
1020 gimple_expr_type (stmt), op0);
f1fb2997 1021 }
75a70cf9 1022
1023 case GIMPLE_BINARY_RHS:
1024 {
1025 /* Handle binary operators that can appear in GIMPLE form. */
1026 tree op0 = gimple_assign_rhs1 (stmt);
1027 tree op1 = gimple_assign_rhs2 (stmt);
1028
1029 /* Simplify the operands down to constants when appropriate. */
1030 if (TREE_CODE (op0) == SSA_NAME)
1031 {
1032 prop_value_t *val = get_value (op0);
1033 if (val->lattice_val == CONSTANT)
1034 op0 = val->value;
1035 }
1036
1037 if (TREE_CODE (op1) == SSA_NAME)
1038 {
1039 prop_value_t *val = get_value (op1);
1040 if (val->lattice_val == CONSTANT)
1041 op1 = val->value;
1042 }
1043
5768aeb3 1044 /* Fold &foo + CST into an invariant reference if possible. */
1045 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1046 && TREE_CODE (op0) == ADDR_EXPR
1047 && TREE_CODE (op1) == INTEGER_CST)
1048 {
1049 tree lhs = gimple_assign_lhs (stmt);
1050 tree tem = maybe_fold_offset_to_address (op0, op1,
1051 TREE_TYPE (lhs));
1052 if (tem != NULL_TREE)
1053 return tem;
1054 }
1055
75a70cf9 1056 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1057 }
1058
1059 default:
1060 gcc_unreachable ();
1061 }
1062 }
1063 break;
4ee9c684 1064
75a70cf9 1065 case GIMPLE_CALL:
f257af64 1066 {
1067 tree fn = gimple_call_fn (stmt);
1068 prop_value_t *val;
1069
1070 if (TREE_CODE (fn) == SSA_NAME)
1071 {
1072 val = get_value (fn);
1073 if (val->lattice_val == CONSTANT)
1074 fn = val->value;
1075 }
1076 if (TREE_CODE (fn) == ADDR_EXPR
8ef4f124 1077 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
f257af64 1078 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1079 {
1080 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1081 tree call, retval;
1082 unsigned i;
1083 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1084 {
1085 args[i] = gimple_call_arg (stmt, i);
1086 if (TREE_CODE (args[i]) == SSA_NAME)
1087 {
1088 val = get_value (args[i]);
1089 if (val->lattice_val == CONSTANT)
1090 args[i] = val->value;
1091 }
1092 }
1093 call = build_call_array (gimple_call_return_type (stmt),
1094 fn, gimple_call_num_args (stmt), args);
1095 retval = fold_call_expr (call, false);
1096 if (retval)
1097 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1098 STRIP_NOPS (retval);
1099 return retval;
1100 }
1101 return NULL_TREE;
1102 }
4ee9c684 1103
75a70cf9 1104 case GIMPLE_COND:
1105 {
1106 /* Handle comparison operators that can appear in GIMPLE form. */
1107 tree op0 = gimple_cond_lhs (stmt);
1108 tree op1 = gimple_cond_rhs (stmt);
1109 enum tree_code code = gimple_cond_code (stmt);
1110
1111 /* Simplify the operands down to constants when appropriate. */
1112 if (TREE_CODE (op0) == SSA_NAME)
1113 {
1114 prop_value_t *val = get_value (op0);
1115 if (val->lattice_val == CONSTANT)
1116 op0 = val->value;
1117 }
1118
1119 if (TREE_CODE (op1) == SSA_NAME)
1120 {
1121 prop_value_t *val = get_value (op1);
1122 if (val->lattice_val == CONSTANT)
1123 op1 = val->value;
1124 }
1125
1126 return fold_binary (code, boolean_type_node, op0, op1);
1127 }
4ee9c684 1128
75a70cf9 1129 case GIMPLE_SWITCH:
1130 {
1131 tree rhs = gimple_switch_index (stmt);
04236c3a 1132
75a70cf9 1133 if (TREE_CODE (rhs) == SSA_NAME)
1134 {
1135 /* If the RHS is an SSA_NAME, return its known constant value,
1136 if any. */
1137 return get_value (rhs)->value;
1138 }
04236c3a 1139
75a70cf9 1140 return rhs;
1141 }
912f109f 1142
75a70cf9 1143 default:
1144 gcc_unreachable ();
4ee9c684 1145 }
4ee9c684 1146}
1147
1148
8782adcf 1149/* Return the tree representing the element referenced by T if T is an
1150 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1151 NULL_TREE otherwise. */
1152
e004838d 1153tree
8782adcf 1154fold_const_aggregate_ref (tree t)
1155{
1156 prop_value_t *value;
c75b4594 1157 tree base, ctor, idx, field;
1158 unsigned HOST_WIDE_INT cnt;
1159 tree cfield, cval;
8782adcf 1160
8edeb88b 1161 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1162 return get_symbol_constant_value (t);
1163
8782adcf 1164 switch (TREE_CODE (t))
1165 {
1166 case ARRAY_REF:
1167 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1168 DECL_INITIAL. If BASE is a nested reference into another
1169 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1170 the inner reference. */
1171 base = TREE_OPERAND (t, 0);
1172 switch (TREE_CODE (base))
1173 {
1174 case VAR_DECL:
1175 if (!TREE_READONLY (base)
1176 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1177 || !targetm.binds_local_p (base))
1178 return NULL_TREE;
1179
1180 ctor = DECL_INITIAL (base);
1181 break;
1182
1183 case ARRAY_REF:
1184 case COMPONENT_REF:
1185 ctor = fold_const_aggregate_ref (base);
1186 break;
1187
04236c3a 1188 case STRING_CST:
1189 case CONSTRUCTOR:
1190 ctor = base;
1191 break;
1192
8782adcf 1193 default:
1194 return NULL_TREE;
1195 }
1196
1197 if (ctor == NULL_TREE
4f61cce6 1198 || (TREE_CODE (ctor) != CONSTRUCTOR
1199 && TREE_CODE (ctor) != STRING_CST)
8782adcf 1200 || !TREE_STATIC (ctor))
1201 return NULL_TREE;
1202
1203 /* Get the index. If we have an SSA_NAME, try to resolve it
1204 with the current lattice value for the SSA_NAME. */
1205 idx = TREE_OPERAND (t, 1);
1206 switch (TREE_CODE (idx))
1207 {
1208 case SSA_NAME:
bfa30570 1209 if ((value = get_value (idx))
8782adcf 1210 && value->lattice_val == CONSTANT
1211 && TREE_CODE (value->value) == INTEGER_CST)
1212 idx = value->value;
1213 else
1214 return NULL_TREE;
1215 break;
1216
1217 case INTEGER_CST:
1218 break;
1219
1220 default:
1221 return NULL_TREE;
1222 }
1223
4f61cce6 1224 /* Fold read from constant string. */
1225 if (TREE_CODE (ctor) == STRING_CST)
1226 {
1227 if ((TYPE_MODE (TREE_TYPE (t))
1228 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1229 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1230 == MODE_INT)
1231 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1232 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
7b050b7b 1233 return build_int_cst_type (TREE_TYPE (t),
1234 (TREE_STRING_POINTER (ctor)
1235 [TREE_INT_CST_LOW (idx)]));
4f61cce6 1236 return NULL_TREE;
1237 }
1238
8782adcf 1239 /* Whoo-hoo! I'll fold ya baby. Yeah! */
c75b4594 1240 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1241 if (tree_int_cst_equal (cfield, idx))
590d65aa 1242 {
1243 STRIP_USELESS_TYPE_CONVERSION (cval);
1244 return cval;
1245 }
8782adcf 1246 break;
1247
1248 case COMPONENT_REF:
1249 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1250 DECL_INITIAL. If BASE is a nested reference into another
1251 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1252 the inner reference. */
1253 base = TREE_OPERAND (t, 0);
1254 switch (TREE_CODE (base))
1255 {
1256 case VAR_DECL:
1257 if (!TREE_READONLY (base)
1258 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1259 || !targetm.binds_local_p (base))
1260 return NULL_TREE;
1261
1262 ctor = DECL_INITIAL (base);
1263 break;
1264
1265 case ARRAY_REF:
1266 case COMPONENT_REF:
1267 ctor = fold_const_aggregate_ref (base);
1268 break;
1269
1270 default:
1271 return NULL_TREE;
1272 }
1273
1274 if (ctor == NULL_TREE
1275 || TREE_CODE (ctor) != CONSTRUCTOR
1276 || !TREE_STATIC (ctor))
1277 return NULL_TREE;
1278
1279 field = TREE_OPERAND (t, 1);
1280
c75b4594 1281 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1282 if (cfield == field
8782adcf 1283 /* FIXME: Handle bit-fields. */
c75b4594 1284 && ! DECL_BIT_FIELD (cfield))
590d65aa 1285 {
1286 STRIP_USELESS_TYPE_CONVERSION (cval);
1287 return cval;
1288 }
8782adcf 1289 break;
1290
908cb59d 1291 case REALPART_EXPR:
1292 case IMAGPART_EXPR:
1293 {
1294 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1295 if (c && TREE_CODE (c) == COMPLEX_CST)
1296 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1297 break;
1298 }
04236c3a 1299
1300 case INDIRECT_REF:
1301 {
1302 tree base = TREE_OPERAND (t, 0);
1303 if (TREE_CODE (base) == SSA_NAME
1304 && (value = get_value (base))
1305 && value->lattice_val == CONSTANT
1306 && TREE_CODE (value->value) == ADDR_EXPR)
1307 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1308 break;
1309 }
1310
8782adcf 1311 default:
1312 break;
1313 }
1314
1315 return NULL_TREE;
1316}
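
/* An illustrative (hypothetical) case for the folding above: given

     static const int primes[4] = { 2, 3, 5, 7 };

   the read primes[2] folds to 5 by looking the index up in the
   DECL_INITIAL constructor, and primes[i_1] folds the same way when
   the lattice records i_1 as CONSTANT 2.  */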
75a70cf9 1317
1318/* Evaluate statement STMT.
1319 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 1320
88dbf20f 1321static prop_value_t
75a70cf9 1322evaluate_stmt (gimple stmt)
4ee9c684 1323{
88dbf20f 1324 prop_value_t val;
4f61cce6 1325 tree simplified = NULL_TREE;
88dbf20f 1326 ccp_lattice_t likelyvalue = likely_value (stmt);
add6ee5e 1327 bool is_constant;
88dbf20f 1328
add6ee5e 1329 fold_defer_overflow_warnings ();
1330
4ee9c684 1331 /* If the statement is likely to have a CONSTANT result, then try
1332 to fold the statement to determine the constant value. */
75a70cf9 1333 /* FIXME. This is the only place that we call ccp_fold.
1334 Since likely_value never returns CONSTANT for calls, we will
1335 not attempt to fold them, including builtins that may profit. */
4ee9c684 1336 if (likelyvalue == CONSTANT)
1337 simplified = ccp_fold (stmt);
1338 /* If the statement is likely to have a VARYING result, then do not
1339 bother folding the statement. */
04236c3a 1340 else if (likelyvalue == VARYING)
75a70cf9 1341 {
590c3166 1342 enum gimple_code code = gimple_code (stmt);
75a70cf9 1343 if (code == GIMPLE_ASSIGN)
1344 {
1345 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1346
1347 /* Other cases cannot satisfy is_gimple_min_invariant
1348 without folding. */
1349 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1350 simplified = gimple_assign_rhs1 (stmt);
1351 }
1352 else if (code == GIMPLE_SWITCH)
1353 simplified = gimple_switch_index (stmt);
1354 else
1355 /* These cannot satisfy is_gimple_min_invariant without folding. */
1356 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1357 }
4ee9c684 1358
add6ee5e 1359 is_constant = simplified && is_gimple_min_invariant (simplified);
1360
1361 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1362
912f109f 1363 if (dump_file && (dump_flags & TDF_DETAILS))
1364 {
1365 fprintf (dump_file, "which is likely ");
1366 switch (likelyvalue)
1367 {
1368 case CONSTANT:
1369 fprintf (dump_file, "CONSTANT");
1370 break;
1371 case UNDEFINED:
1372 fprintf (dump_file, "UNDEFINED");
1373 break;
1374 case VARYING:
1375 fprintf (dump_file, "VARYING");
1376 break;
1377 default:;
1378 }
1379 fprintf (dump_file, "\n");
1380 }
1381
add6ee5e 1382 if (is_constant)
4ee9c684 1383 {
1384 /* The statement produced a constant value. */
1385 val.lattice_val = CONSTANT;
88dbf20f 1386 val.value = simplified;
4ee9c684 1387 }
1388 else
1389 {
1390 /* The statement produced a nonconstant value. If the statement
88dbf20f 1391 had UNDEFINED operands, then the result of the statement
1392 should be UNDEFINED. Otherwise, the statement is VARYING. */
bfa30570 1393 if (likelyvalue == UNDEFINED)
b765fa12 1394 val.lattice_val = likelyvalue;
1395 else
1396 val.lattice_val = VARYING;
1397
88dbf20f 1398 val.value = NULL_TREE;
4ee9c684 1399 }
41511585 1400
1401 return val;
4ee9c684 1402}
1403
41511585 1404/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 1405 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1406 creates virtual definitions, set the value of each new name to that
75a70cf9 1407 of the RHS (if we can derive a constant out of the RHS).
1408 Value-returning call statements also perform an assignment, and
1409 are handled here. */
4ee9c684 1410
41511585 1411static enum ssa_prop_result
75a70cf9 1412visit_assignment (gimple stmt, tree *output_p)
4ee9c684 1413{
88dbf20f 1414 prop_value_t val;
88dbf20f 1415 enum ssa_prop_result retval;
4ee9c684 1416
75a70cf9 1417 tree lhs = gimple_get_lhs (stmt);
4ee9c684 1418
75a70cf9 1419 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1420 || gimple_call_lhs (stmt) != NULL_TREE);
1421
1422 if (gimple_assign_copy_p (stmt))
41511585 1423 {
75a70cf9 1424 tree rhs = gimple_assign_rhs1 (stmt);
88dbf20f 1425
75a70cf9 1426 if (TREE_CODE (rhs) == SSA_NAME)
1427 {
1428 /* For a simple copy operation, we copy the lattice values. */
1429 prop_value_t *nval = get_value (rhs);
1430 val = *nval;
1431 }
88dbf20f 1432 else
75a70cf9 1433 val = evaluate_stmt (stmt);
41511585 1434 }
1435 else
75a70cf9 1436 /* Evaluate the statement, which could be
1437 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 1438 val = evaluate_stmt (stmt);
4ee9c684 1439
88dbf20f 1440 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 1441
41511585 1442 /* Set the lattice value of the statement's output. */
88dbf20f 1443 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 1444 {
88dbf20f 1445 /* If STMT is an assignment to an SSA_NAME, we only have one
1446 value to set. */
1447 if (set_lattice_value (lhs, val))
1448 {
1449 *output_p = lhs;
1450 if (val.lattice_val == VARYING)
1451 retval = SSA_PROP_VARYING;
1452 else
1453 retval = SSA_PROP_INTERESTING;
1454 }
4ee9c684 1455 }
88dbf20f 1456
1457 return retval;
4ee9c684 1458}
1459
4ee9c684 1460
41511585 1461/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1462 if it can determine which edge will be taken. Otherwise, return
1463 SSA_PROP_VARYING. */
1464
1465static enum ssa_prop_result
75a70cf9 1466visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 1467{
88dbf20f 1468 prop_value_t val;
41511585 1469 basic_block block;
1470
75a70cf9 1471 block = gimple_bb (stmt);
41511585 1472 val = evaluate_stmt (stmt);
1473
1474 /* Find which edge out of the conditional block will be taken and add it
1475 to the worklist. If no single edge can be determined statically,
1476 return SSA_PROP_VARYING to feed all the outgoing edges to the
1477 propagation engine. */
88dbf20f 1478 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
41511585 1479 if (*taken_edge_p)
1480 return SSA_PROP_INTERESTING;
1481 else
1482 return SSA_PROP_VARYING;
4ee9c684 1483}
1484
4ee9c684 1485
41511585 1486/* Evaluate statement STMT. If the statement produces an output value and
1487 its evaluation changes the lattice value of its output, return
1488 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1489 output value.
1490
1491 If STMT is a conditional branch and we can determine its truth
1492 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1493 value, return SSA_PROP_VARYING. */
4ee9c684 1494
41511585 1495static enum ssa_prop_result
75a70cf9 1496ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 1497{
41511585 1498 tree def;
1499 ssa_op_iter iter;
4ee9c684 1500
41511585 1501 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 1502 {
88dbf20f 1503 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 1504 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 1505 }
4ee9c684 1506
75a70cf9 1507 switch (gimple_code (stmt))
4ee9c684 1508 {
75a70cf9 1509 case GIMPLE_ASSIGN:
1510 /* If the statement is an assignment that produces a single
1511 output value, evaluate its RHS to see if the lattice value of
1512 its output has changed. */
1513 return visit_assignment (stmt, output_p);
1514
1515 case GIMPLE_CALL:
1516 /* A value-returning call also performs an assignment. */
1517 if (gimple_call_lhs (stmt) != NULL_TREE)
1518 return visit_assignment (stmt, output_p);
1519 break;
1520
1521 case GIMPLE_COND:
1522 case GIMPLE_SWITCH:
1523 /* If STMT is a conditional branch, see if we can determine
1524 which branch will be taken. */
1525 /* FIXME. It appears that we should be able to optimize
1526 computed GOTOs here as well. */
1527 return visit_cond_stmt (stmt, taken_edge_p);
1528
1529 default:
1530 break;
4ee9c684 1531 }
4ee9c684 1532
41511585 1533 /* Any other kind of statement is not interesting for constant
1534 propagation and, therefore, not worth simulating. */
41511585 1535 if (dump_file && (dump_flags & TDF_DETAILS))
1536 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 1537
41511585 1538 /* Definitions made by statements other than assignments to
1539 SSA_NAMEs represent unknown modifications to their outputs.
1540 Mark them VARYING. */
88dbf20f 1541 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1542 {
61207d43 1543 prop_value_t v = { VARYING, NULL_TREE };
88dbf20f 1544 set_lattice_value (def, v);
1545 }
4ee9c684 1546
41511585 1547 return SSA_PROP_VARYING;
1548}
4ee9c684 1549
4ee9c684 1550
88dbf20f 1551/* Main entry point for SSA Conditional Constant Propagation. */
41511585 1552
33a34f1e 1553static unsigned int
61207d43 1554do_ssa_ccp (void)
41511585 1555{
1556 ccp_initialize ();
1557 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 1558 if (ccp_finalize ())
eb9161e7 1559 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
33a34f1e 1560 else
1561 return 0;
4ee9c684 1562}
1563
5664499b 1564
1565static bool
41511585 1566gate_ccp (void)
5664499b 1567{
41511585 1568 return flag_tree_ccp != 0;
5664499b 1569}
1570
4ee9c684 1571
20099e35 1572struct gimple_opt_pass pass_ccp =
41511585 1573{
20099e35 1574 {
1575 GIMPLE_PASS,
41511585 1576 "ccp", /* name */
1577 gate_ccp, /* gate */
88dbf20f 1578 do_ssa_ccp, /* execute */
41511585 1579 NULL, /* sub */
1580 NULL, /* next */
1581 0, /* static_pass_number */
1582 TV_TREE_CCP, /* tv_id */
49290934 1583 PROP_cfg | PROP_ssa, /* properties_required */
41511585 1584 0, /* properties_provided */
b6246c40 1585 0, /* properties_destroyed */
41511585 1586 0, /* todo_flags_start */
33a34f1e 1587 TODO_dump_func | TODO_verify_ssa
20099e35 1588    | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
1589 }
41511585 1590};
4ee9c684 1591
4ee9c684 1592
4ee9c684 1593/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1594 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
0bed3869 1595 is the desired result type. */
4ee9c684 1596
1597static tree
b7488229 1598maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1599 bool allow_negative_idx)
4ee9c684 1600{
e71da05f 1601 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
6374121b 1602 tree array_type, elt_type, elt_size;
b513c084 1603 tree domain_type;
6374121b 1604
 1605  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
 1606     measured in units of the size of the element type) from that ARRAY_REF.
1607 We can't do anything if either is variable.
1608
1609 The case we handle here is *(&A[N]+O). */
1610 if (TREE_CODE (base) == ARRAY_REF)
1611 {
1612 tree low_bound = array_ref_low_bound (base);
1613
1614 elt_offset = TREE_OPERAND (base, 1);
1615 if (TREE_CODE (low_bound) != INTEGER_CST
1616 || TREE_CODE (elt_offset) != INTEGER_CST)
1617 return NULL_TREE;
1618
1619 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1620 base = TREE_OPERAND (base, 0);
1621 }
4ee9c684 1622
1623 /* Ignore stupid user tricks of indexing non-array variables. */
1624 array_type = TREE_TYPE (base);
1625 if (TREE_CODE (array_type) != ARRAY_TYPE)
1626 return NULL_TREE;
1627 elt_type = TREE_TYPE (array_type);
c8ca3ee7 1628 if (!useless_type_conversion_p (orig_type, elt_type))
4ee9c684 1629 return NULL_TREE;
e71da05f 1630
1631 /* Use signed size type for intermediate computation on the index. */
1632 idx_type = signed_type_for (size_type_node);
1633
6374121b 1634 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1635 element type (so we can use the alignment if it's not constant).
1636 Otherwise, compute the offset as an index by using a division. If the
1637 division isn't exact, then don't do anything. */
4ee9c684 1638 elt_size = TYPE_SIZE_UNIT (elt_type);
3b45913d 1639 if (!elt_size)
 1640    return NULL_TREE;
6374121b 1641 if (integer_zerop (offset))
1642 {
1643 if (TREE_CODE (elt_size) != INTEGER_CST)
1644 elt_size = size_int (TYPE_ALIGN (elt_type));
4ee9c684 1645
e71da05f 1646 idx = build_int_cst (idx_type, 0);
6374121b 1647 }
1648 else
1649 {
1650 unsigned HOST_WIDE_INT lquo, lrem;
1651 HOST_WIDE_INT hquo, hrem;
e71da05f 1652 double_int soffset;
6374121b 1653
e71da05f 1654 /* The final array offset should be signed, so we need
1655 to sign-extend the (possibly pointer) offset here
1656 and use signed division. */
1657 soffset = double_int_sext (tree_to_double_int (offset),
1658 TYPE_PRECISION (TREE_TYPE (offset)));
6374121b 1659 if (TREE_CODE (elt_size) != INTEGER_CST
e71da05f 1660 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1661 soffset.low, soffset.high,
6374121b 1662 TREE_INT_CST_LOW (elt_size),
1663 TREE_INT_CST_HIGH (elt_size),
1664 &lquo, &hquo, &lrem, &hrem)
1665 || lrem || hrem)
1666 return NULL_TREE;
4ee9c684 1667
e71da05f 1668 idx = build_int_cst_wide (idx_type, lquo, hquo);
6374121b 1669 }
1670
1671 /* Assume the low bound is zero. If there is a domain type, get the
1672 low bound, if any, convert the index into that type, and add the
1673 low bound. */
e71da05f 1674 min_idx = build_int_cst (idx_type, 0);
b513c084 1675 domain_type = TYPE_DOMAIN (array_type);
1676 if (domain_type)
4ee9c684 1677 {
b513c084 1678 idx_type = domain_type;
e71da05f 1679 if (TYPE_MIN_VALUE (idx_type))
1680 min_idx = TYPE_MIN_VALUE (idx_type);
6374121b 1681 else
e71da05f 1682 min_idx = fold_convert (idx_type, min_idx);
6374121b 1683
1684 if (TREE_CODE (min_idx) != INTEGER_CST)
1685 return NULL_TREE;
1686
e71da05f 1687 elt_offset = fold_convert (idx_type, elt_offset);
4ee9c684 1688 }
1689
6374121b 1690 if (!integer_zerop (min_idx))
1691 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1692 if (!integer_zerop (elt_offset))
1693 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1694
e71da05f 1695  /* Make sure any truncation of the index happens only after offsetting.  */
1696 idx = fold_convert (idx_type, idx);
1697
b513c084 1698 /* We don't want to construct access past array bounds. For example
b7488229 1699 char *(c[4]);
1700 c[3][2];
1701 should not be simplified into (*c)[14] or tree-vrp will
1702 give false warnings. The same is true for
1703 struct A { long x; char d[0]; } *a;
1704 (char *)a - 4;
1705 which should be not folded to &a->d[-8]. */
1706 if (domain_type
1707 && TYPE_MAX_VALUE (domain_type)
b513c084 1708 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1709 {
1710 tree up_bound = TYPE_MAX_VALUE (domain_type);
1711
1712 if (tree_int_cst_lt (up_bound, idx)
1713 /* Accesses after the end of arrays of size 0 (gcc
1714 extension) and 1 are likely intentional ("struct
1715 hack"). */
1716 && compare_tree_int (up_bound, 1) > 0)
1717 return NULL_TREE;
1718 }
b7488229 1719 if (domain_type
1720 && TYPE_MIN_VALUE (domain_type))
1721 {
1722 if (!allow_negative_idx
1723 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1724 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1725 return NULL_TREE;
1726 }
1727 else if (!allow_negative_idx
1728 && compare_tree_int (idx, 0) < 0)
1729 return NULL_TREE;
b513c084 1730
d3828421 1731 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
4ee9c684 1732}
1733
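/* As an illustration, a hypothetical case of the folding above: given

     int a[8];

   an access *(int *)((char *)&a + 12) reaches this function with
   ELT_SIZE 4 and OFFSET 12; the division is exact, so the access can
   be rewritten as a[3].  With OFFSET 13 the division would leave a
   remainder and NULL_TREE would be returned instead.  */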
41511585 1734
3b45913d 1735/* Attempt to fold *(S+O) to S.X.  RECORD_TYPE is the type of S, BASE is
4ee9c684 1736 the record itself or a pointer to it (when BASE_IS_PTR), OFFSET is a
 1737 byte displacement, and ORIG_TYPE is the desired result type. */
4ee9c684 1738
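/* As an illustration, a hypothetical case (assuming a 4-byte int): given

     struct S { int a; int b; } *p;

   the load *(int *)((char *)p + 4) arrives with RECORD_TYPE struct S,
   OFFSET 4, ORIG_TYPE int and BASE_IS_PTR true; the field walk below
   matches b at byte position 4 and builds the COMPONENT_REF p->b.  */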
3b45913d 1739static tree
4ee9c684 1740maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1741 tree orig_type, bool base_is_ptr)
1742{
6d5d8428 1743 tree f, t, field_type, tail_array_field, field_offset;
d9745cea 1744 tree ret;
1745 tree new_base;
4ee9c684 1746
1747 if (TREE_CODE (record_type) != RECORD_TYPE
1748 && TREE_CODE (record_type) != UNION_TYPE
1749 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1750 return NULL_TREE;
1751
1752 /* Short-circuit silly cases. */
c8ca3ee7 1753 if (useless_type_conversion_p (record_type, orig_type))
4ee9c684 1754 return NULL_TREE;
1755
1756 tail_array_field = NULL_TREE;
1757 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1758 {
1759 int cmp;
1760
1761 if (TREE_CODE (f) != FIELD_DECL)
1762 continue;
1763 if (DECL_BIT_FIELD (f))
1764 continue;
6d5d8428 1765
3b45913d 1766 if (!DECL_FIELD_OFFSET (f))
1767 continue;
6d5d8428 1768 field_offset = byte_position (f);
1769 if (TREE_CODE (field_offset) != INTEGER_CST)
4ee9c684 1770 continue;
1771
1772 /* ??? Java creates "interesting" fields for representing base classes.
1773 They have no name, and have no context. With no context, we get into
1774 trouble with nonoverlapping_component_refs_p. Skip them. */
1775 if (!DECL_FIELD_CONTEXT (f))
1776 continue;
1777
1778 /* The previous array field isn't at the end. */
1779 tail_array_field = NULL_TREE;
1780
1781 /* Check to see if this offset overlaps with the field. */
6d5d8428 1782 cmp = tree_int_cst_compare (field_offset, offset);
4ee9c684 1783 if (cmp > 0)
1784 continue;
1785
1786 field_type = TREE_TYPE (f);
4ee9c684 1787
1788 /* Here we exactly match the offset being checked. If the types match,
1789 then we can return that field. */
115073ff 1790 if (cmp == 0
c8ca3ee7 1791 && useless_type_conversion_p (orig_type, field_type))
4ee9c684 1792 {
1793 if (base_is_ptr)
1794 base = build1 (INDIRECT_REF, record_type, base);
40b19772 1795 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
4ee9c684 1796 return t;
1797 }
115073ff 1798
1799 /* Don't care about offsets into the middle of scalars. */
1800 if (!AGGREGATE_TYPE_P (field_type))
1801 continue;
4ee9c684 1802
115073ff 1803 /* Check for an array at the end of the struct. This is often
 1804 used for flexible array members. We should be able to
 1805 turn this into an array access anyway. */
1806 if (TREE_CODE (field_type) == ARRAY_TYPE)
1807 tail_array_field = f;
1808
1809 /* Check the end of the field against the offset. */
1810 if (!DECL_SIZE_UNIT (f)
1811 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1812 continue;
1813 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1814 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1815 continue;
4ee9c684 1816
115073ff 1817 /* If we matched, then set offset to the displacement into
1818 this field. */
d9745cea 1819 if (base_is_ptr)
1820 new_base = build1 (INDIRECT_REF, record_type, base);
1821 else
1822 new_base = base;
1823 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1824
1825 /* Recurse to possibly find the match. */
b7488229 1826 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1827 f == TYPE_FIELDS (record_type));
d9745cea 1828 if (ret)
1829 return ret;
1830 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1831 orig_type, false);
1832 if (ret)
1833 return ret;
4ee9c684 1834 }
1835
1836 if (!tail_array_field)
1837 return NULL_TREE;
1838
1839 f = tail_array_field;
1840 field_type = TREE_TYPE (f);
115073ff 1841 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
4ee9c684 1842
4ee9c684 1843 /* If we get here, we've got an aggregate field, and a possibly
365db11e 1844 nonzero offset into it. Recurse and hope for a valid match. */
4ee9c684 1845 if (base_is_ptr)
1846 base = build1 (INDIRECT_REF, record_type, base);
40b19772 1847 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
4ee9c684 1848
b7488229 1849 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1850 f == TYPE_FIELDS (record_type));
4ee9c684 1851 if (t)
1852 return t;
1853 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1854 orig_type, false);
1855}
1856
3b45913d 1857/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
 1858 or BASE[index], or as a combination of those.
 1859
 1860 Before attempting the conversion, strip off existing ADDR_EXPRs and
 1861 handled component refs. */
1862
1863tree
1864maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1865{
1866 tree ret;
1867 tree type;
1868 bool base_is_ptr = true;
1869
1870 STRIP_NOPS (base);
1871 if (TREE_CODE (base) == ADDR_EXPR)
1872 {
1873 base_is_ptr = false;
1874
1875 base = TREE_OPERAND (base, 0);
1876
 1877 /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
 1878 field of a union, so it needs to be removed and a new COMPONENT_REF
 1879 constructed. Such wrong COMPONENT_REFs are often created by folding
 1880 the (type *)&object within the expression (type *)&object+offset. */
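      /* A hypothetical instance of that situation: for

           union U { float f; int i; } u;

         folding (int *)&u inside (int *)&u + 0 may have left the
         COMPONENT_REF u.f for the wrong field; get_ref_base_and_extent
         below recovers the underlying object u so that a correct u.i
         reference can be rebuilt from the offset.  */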
5768aeb3 1881 if (handled_component_p (base))
3b45913d 1882 {
1883 HOST_WIDE_INT sub_offset, size, maxsize;
1884 tree newbase;
1885 newbase = get_ref_base_and_extent (base, &sub_offset,
1886 &size, &maxsize);
1887 gcc_assert (newbase);
5768aeb3 1888 if (size == maxsize
844461a0 1889 && size != -1
5768aeb3 1890 && !(sub_offset & (BITS_PER_UNIT - 1)))
3b45913d 1891 {
1892 base = newbase;
1893 if (sub_offset)
1894 offset = int_const_binop (PLUS_EXPR, offset,
1895 build_int_cst (TREE_TYPE (offset),
1896 sub_offset / BITS_PER_UNIT), 1);
1897 }
1898 }
c8ca3ee7 1899 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
3b45913d 1900 && integer_zerop (offset))
1901 return base;
1902 type = TREE_TYPE (base);
1903 }
1904 else
1905 {
1906 base_is_ptr = true;
1907 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1908 return NULL_TREE;
1909 type = TREE_TYPE (TREE_TYPE (base));
1910 }
1911 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1912 orig_type, base_is_ptr);
1913 if (!ret)
1914 {
1915 if (base_is_ptr)
1916 base = build1 (INDIRECT_REF, type, base);
b7488229 1917 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
3b45913d 1918 }
1919 return ret;
1920}
41511585 1921
5768aeb3 1922/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
 1923 or &BASE[index], or as a combination of those.
 1924
 1925 Before attempting the conversion, strip off existing component refs. */
1926
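/* As an illustration, a hypothetical case: for

     char a[10];

   the address (char *)&a + 4 fits this pattern and can become &a[4],
   provided the resulting pointer type converts uselessly to ORIG_TYPE.  */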
1927tree
1928maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1929{
1930 tree t;
1931
1932 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1933 && POINTER_TYPE_P (orig_type));
1934
1935 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1936 if (t != NULL_TREE)
1937 {
1938 tree orig = addr;
1939 tree ptr_type;
1940
 1941 /* For __builtin_object_size to function correctly we need to
 1942 make sure not to fold address arithmetic in a way that changes
 1943 the reference from one array to another. This would happen,
 1944 for example, for
 1945
 1946 struct X { char s1[10]; char s2[10]; } s;
 1947 char *foo (void) { return &s.s2[-4]; }
 1948
 1949 where we need to avoid generating &s.s1[6]. As the C and
 1950 C++ front ends create different initial trees,
 1951 (char *) &s.s1 + -4 vs. &s.s1[-4], we have to do some
 1952 sophisticated comparisons here. Note that checking for the
 1953 condition after the fact is easier than trying to avoid doing
 1954 the folding. */
1955 STRIP_NOPS (orig);
1956 if (TREE_CODE (orig) == ADDR_EXPR)
1957 orig = TREE_OPERAND (orig, 0);
1958 if ((TREE_CODE (orig) == ARRAY_REF
1959 || (TREE_CODE (orig) == COMPONENT_REF
1960 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1961 && (TREE_CODE (t) == ARRAY_REF
bc79a76b 1962 || TREE_CODE (t) == COMPONENT_REF)
5768aeb3 1963 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1964 ? TREE_OPERAND (orig, 0) : orig,
1965 TREE_CODE (t) == ARRAY_REF
1966 ? TREE_OPERAND (t, 0) : t, 0))
1967 return NULL_TREE;
1968
1969 ptr_type = build_pointer_type (TREE_TYPE (t));
1970 if (!useless_type_conversion_p (orig_type, ptr_type))
1971 return NULL_TREE;
1972 return build_fold_addr_expr_with_type (t, ptr_type);
1973 }
1974
1975 return NULL_TREE;
1976}
1977
4ee9c684 1978/* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1979 Return the simplified expression, or NULL if nothing could be done. */
1980
1981static tree
1982maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1983{
1984 tree t;
5acf8305 1985 bool volatile_p = TREE_THIS_VOLATILE (expr);
4ee9c684 1986
1987 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1988 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1989 are sometimes added. */
1990 base = fold (base);
40bcfc86 1991 STRIP_TYPE_NOPS (base);
4ee9c684 1992 TREE_OPERAND (expr, 0) = base;
1993
1994 /* One possibility is that the address reduces to a string constant. */
1995 t = fold_read_from_constant_string (expr);
1996 if (t)
1997 return t;
1998
0de36bdb 1999 /* Add in any offset from a POINTER_PLUS_EXPR. */
2000 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
4ee9c684 2001 {
2002 tree offset2;
2003
2004 offset2 = TREE_OPERAND (base, 1);
2005 if (TREE_CODE (offset2) != INTEGER_CST)
2006 return NULL_TREE;
2007 base = TREE_OPERAND (base, 0);
2008
0de36bdb 2009 offset = fold_convert (sizetype,
2010 int_const_binop (PLUS_EXPR, offset, offset2, 1));
4ee9c684 2011 }
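  /* E.g. (a hypothetical trace, with "p+" denoting POINTER_PLUS_EXPR):
     for *((&a p+ 4) p+ 8), the fold above first collapses the nested
     addition to &a p+ 12; the test just performed then moves the
     constant 12 into OFFSET, leaving BASE = &a.  */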
2012
2013 if (TREE_CODE (base) == ADDR_EXPR)
2014 {
3b45913d 2015 tree base_addr = base;
2016
4ee9c684 2017 /* Strip the ADDR_EXPR. */
2018 base = TREE_OPERAND (base, 0);
2019
e67e5e1f 2020 /* Fold away CONST_DECL to its value, if the type is scalar. */
2021 if (TREE_CODE (base) == CONST_DECL
0a685b29 2022 && is_gimple_min_invariant (DECL_INITIAL (base)))
e67e5e1f 2023 return DECL_INITIAL (base);
2024
4ee9c684 2025 /* Try folding *(&B+O) to B.X. */
3b45913d 2026 t = maybe_fold_offset_to_reference (base_addr, offset,
2027 TREE_TYPE (expr));
4ee9c684 2028 if (t)
5acf8305 2029 {
bcc7452f 2030 /* Preserve volatileness of the original expression.
2031 We can end up with a plain decl here which is shared
2032 and we shouldn't mess with its flags. */
2033 if (!SSA_VAR_P (t))
2034 TREE_THIS_VOLATILE (t) = volatile_p;
5acf8305 2035 return t;
2036 }
4ee9c684 2037 }
2038 else
2039 {
2040 /* We can get here for out-of-range string constant accesses,
2041 such as "_"[3]. Bail out of the entire substitution search
2042 and arrange for the entire statement to be replaced by a
06b27565 2043 call to __builtin_trap. In all likelihood this will all be
4ee9c684 2044 constant-folded away, but in the meantime we can't leave with
2045 something that get_expr_operands can't understand. */
2046
2047 t = base;
2048 STRIP_NOPS (t);
2049 if (TREE_CODE (t) == ADDR_EXPR
2050 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2051 {
2052 /* FIXME: Except that this causes problems elsewhere with dead
1fa3a8f6 2053 code not being deleted, and we die in the rtl expanders
4ee9c684 2054 because we failed to remove some ssa_name. In the meantime,
2055 just return zero. */
2056 /* FIXME2: This condition should be signaled by
2057 fold_read_from_constant_string directly, rather than
2058 re-checking for it here. */
2059 return integer_zero_node;
2060 }
2061
2062 /* Try folding *(B+O) to B->X. Still an improvement. */
2063 if (POINTER_TYPE_P (TREE_TYPE (base)))
2064 {
3b45913d 2065 t = maybe_fold_offset_to_reference (base, offset,
2066 TREE_TYPE (expr));
4ee9c684 2067 if (t)
2068 return t;
2069 }
2070 }
2071
2072 /* Otherwise we had an offset that we could not simplify. */
2073 return NULL_TREE;
2074}
2075
41511585 2076
75a70cf9 2077/* A quaint feature extant in our address arithmetic is that there
4ee9c684 2078 can be hidden type changes here. The type of the result need
2079 not be the same as the type of the input pointer.
2080
2081 What we're after here is an expression of the form
2082 (T *)(&array + const)
75a70cf9 2083 where &array is OP0, const is OP1, RES_TYPE is T and
2084 the cast doesn't actually exist, but is implicit in the
0de36bdb 2085 type of the POINTER_PLUS_EXPR. We'd like to turn this into
4ee9c684 2086 &array[x]
2087 which may be able to propagate further. */
2088
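/* As an illustration, a hypothetical instance (assuming a 4-byte int):
   for

     int a[10];

   the sum &a p+ 8 with RES_TYPE int * is seen here with OP0 = &a and
   OP1 = 8; dividing by the element size gives index 2, so the result
   becomes &a[2].  */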
75a70cf9 2089tree
2090maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
4ee9c684 2091{
4ee9c684 2092 tree ptd_type;
2093 tree t;
4ee9c684 2094
4ee9c684 2095 /* It had better be a constant. */
2096 if (TREE_CODE (op1) != INTEGER_CST)
2097 return NULL_TREE;
2098 /* The first operand should be an ADDR_EXPR. */
2099 if (TREE_CODE (op0) != ADDR_EXPR)
2100 return NULL_TREE;
2101 op0 = TREE_OPERAND (op0, 0);
2102
2103 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2104 the offset into it. */
2105 while (TREE_CODE (op0) == ARRAY_REF)
2106 {
2107 tree array_obj = TREE_OPERAND (op0, 0);
2108 tree array_idx = TREE_OPERAND (op0, 1);
2109 tree elt_type = TREE_TYPE (op0);
2110 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2111 tree min_idx;
2112
2113 if (TREE_CODE (array_idx) != INTEGER_CST)
2114 break;
2115 if (TREE_CODE (elt_size) != INTEGER_CST)
2116 break;
2117
2118 /* Un-bias the index by the min index of the array type. */
2119 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2120 if (min_idx)
2121 {
2122 min_idx = TYPE_MIN_VALUE (min_idx);
2123 if (min_idx)
2124 {
6374121b 2125 if (TREE_CODE (min_idx) != INTEGER_CST)
2126 break;
2127
535664e3 2128 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
4ee9c684 2129 if (!integer_zerop (min_idx))
2130 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2131 min_idx, 0);
2132 }
2133 }
2134
2135 /* Convert the index to a byte offset. */
535664e3 2136 array_idx = fold_convert (sizetype, array_idx);
4ee9c684 2137 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2138
2139 /* Update the operands for the next round, or for folding. */
0de36bdb 2140 op1 = int_const_binop (PLUS_EXPR,
4ee9c684 2141 array_idx, op1, 0);
4ee9c684 2142 op0 = array_obj;
2143 }
2144
75a70cf9 2145 ptd_type = TREE_TYPE (res_type);
0b4a6afc 2146 /* If we want a pointer to void, reconstruct the reference from the
2147 array element type. A pointer to that can be trivially converted
2148 to void *. This happens as we fold (void *)(ptr p+ off). */
2149 if (VOID_TYPE_P (ptd_type)
2150 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2151 ptd_type = TREE_TYPE (TREE_TYPE (op0));
4ee9c684 2152
2153 /* At which point we can try some of the same things as for indirects. */
b7488229 2154 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
4ee9c684 2155 if (!t)
2156 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2157 ptd_type, false);
2158 if (t)
75a70cf9 2159 t = build1 (ADDR_EXPR, res_type, t);
4ee9c684 2160
2161 return t;
2162}
2163
0d759d9c 2164/* For passing state through walk_tree into fold_stmt_r and its
2165 children. */
2166
2167struct fold_stmt_r_data
2168{
75a70cf9 2169 gimple stmt;
add6ee5e 2170 bool *changed_p;
2171 bool *inside_addr_expr_p;
0d759d9c 2172};
2173
4ee9c684 2174/* Subroutine of fold_stmt called via walk_tree. We perform several
2175 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2176
2177static tree
2178fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2179{
75a70cf9 2180 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2181 struct fold_stmt_r_data *fold_stmt_r_data;
2182 bool *inside_addr_expr_p;
2183 bool *changed_p;
4ee9c684 2184 tree expr = *expr_p, t;
30fde358 2185 bool volatile_p = TREE_THIS_VOLATILE (expr);
4ee9c684 2186
75a70cf9 2187 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2188 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2189 changed_p = fold_stmt_r_data->changed_p;
2190
4ee9c684 2191 /* ??? It'd be nice if walk_tree had a pre-order option. */
2192 switch (TREE_CODE (expr))
2193 {
2194 case INDIRECT_REF:
2195 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2196 if (t)
2197 return t;
2198 *walk_subtrees = 0;
2199
2200 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2201 integer_zero_node);
4d444068 2202 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2203 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2204 t = NULL_TREE;
8ac2d49b 2205 if (!t
2206 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2207 /* If we had a good reason for propagating the address here,
2208 make sure we end up with valid gimple. See PR34989. */
2209 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
4ee9c684 2210 break;
2211
3b45913d 2212 case NOP_EXPR:
2213 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2214 if (t)
2215 return t;
2216 *walk_subtrees = 0;
2217
2218 if (POINTER_TYPE_P (TREE_TYPE (expr))
50828ed8 2219 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
3b45913d 2220 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
5768aeb3 2221 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2222 integer_zero_node,
2223 TREE_TYPE (TREE_TYPE (expr)))))
2224 return t;
3b45913d 2225 break;
2226
0d759d9c 2227 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
4ee9c684 2228 We'd only want to bother decomposing an existing ARRAY_REF if
2229 the base array is found to have another offset contained within.
2230 Otherwise we'd be wasting time. */
0d759d9c 2231 case ARRAY_REF:
2232 /* If we are not processing expressions found within an
4d444068 2233 ADDR_EXPR, then we can fold constant array references.
2234 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2235 into 'a' = 5. */
2236 if (!*inside_addr_expr_p && !wi->is_lhs)
0d759d9c 2237 t = fold_read_from_constant_string (expr);
2238 else
 2239 t = NULL_TREE;
2240 break;
4ee9c684 2241
2242 case ADDR_EXPR:
0d759d9c 2243 *inside_addr_expr_p = true;
4ee9c684 2244 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
0d759d9c 2245 *inside_addr_expr_p = false;
4ee9c684 2246 if (t)
2247 return t;
2248 *walk_subtrees = 0;
2249
c7d4e749 2250 /* Make sure the value is properly considered constant, and so gets
2251 propagated as expected. */
4ee9c684 2252 if (*changed_p)
750ad201 2253 recompute_tree_invariant_for_addr_expr (expr);
4ee9c684 2254 return NULL_TREE;
2255
4ee9c684 2256 case COMPONENT_REF:
2257 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2258 if (t)
2259 return t;
2260 *walk_subtrees = 0;
2261
504d3463 2262 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2263 We've already checked that the records are compatible, so we should
2264 come up with a set of compatible fields. */
2265 {
2266 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2267 tree expr_field = TREE_OPERAND (expr, 1);
2268
2269 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2270 {
2271 expr_field = find_compatible_field (expr_record, expr_field);
2272 TREE_OPERAND (expr, 1) = expr_field;
2273 }
2274 }
4ee9c684 2275 break;
2276
aed164c3 2277 case TARGET_MEM_REF:
2278 t = maybe_fold_tmr (expr);
2279 break;
2280
75a70cf9 2281 case POINTER_PLUS_EXPR:
2282 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2283 if (t)
2284 return t;
2285 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2286 if (t)
2287 return t;
2288 *walk_subtrees = 0;
2289
2290 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2291 TREE_OPERAND (expr, 0),
2292 TREE_OPERAND (expr, 1));
2293 break;
2294
bb8a9715 2295 case COND_EXPR:
2296 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2297 {
2298 tree op0 = TREE_OPERAND (expr, 0);
add6ee5e 2299 tree tem;
2300 bool set;
2301
2302 fold_defer_overflow_warnings ();
2303 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2304 TREE_OPERAND (op0, 0),
2305 TREE_OPERAND (op0, 1));
75a70cf9 2306 /* This is actually a conditional expression, not a GIMPLE
2307 conditional statement, however, the valid_gimple_rhs_p
2308 test still applies. */
2309 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
add6ee5e 2310 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2311 if (set)
f2532264 2312 {
75a70cf9 2313 COND_EXPR_COND (expr) = tem;
2314 t = expr;
f2532264 2315 break;
2316 }
bb8a9715 2317 }
f2532264 2318 return NULL_TREE;
bb8a9715 2319
4ee9c684 2320 default:
2321 return NULL_TREE;
2322 }
2323
2324 if (t)
2325 {
bcc7452f 2326 /* Preserve volatileness of the original expression.
2327 We can end up with a plain decl here which is shared
2328 and we shouldn't mess with its flags. */
2329 if (!SSA_VAR_P (t))
2330 TREE_THIS_VOLATILE (t) = volatile_p;
4ee9c684 2331 *expr_p = t;
2332 *changed_p = true;
2333 }
2334
2335 return NULL_TREE;
2336}
2337
0a39fd54 2338/* Compute the string length, maximum string length or maximum value
 2339 of ARG and store it in *LENGTH.
 2340 If ARG is an SSA name variable, follow its use-def chains. Return
 2341 false if we are unable to determine the length or value, or if, for
 2342 TYPE == 0, *LENGTH already holds a different length. VISITED is
 2343 a bitmap of visited variables.
 2344 TYPE is 0 if the string length should be computed, 1 for the maximum
 2345 string length and 2 for the maximum value ARG can have. */
4ee9c684 2346
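/* A hypothetical example of the PHI handling below: for

     s_3 = PHI <"ab", "cd">

   both arguments have string length 2, so *LENGTH becomes 2.  Were one
   argument "abcd" instead, the lengths would disagree and, for
   TYPE == 0, false would be returned.  */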
72648a0e 2347static bool
0a39fd54 2348get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
4ee9c684 2349{
75a70cf9 2350 tree var, val;
2351 gimple def_stmt;
41511585 2352
2353 if (TREE_CODE (arg) != SSA_NAME)
72648a0e 2354 {
ec0fa513 2355 if (TREE_CODE (arg) == COND_EXPR)
2356 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2357 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
0b4a6afc 2358 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2359 else if (TREE_CODE (arg) == ADDR_EXPR
2360 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2361 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2362 {
2363 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2364 if (TREE_CODE (aop0) == INDIRECT_REF
2365 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2366 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2367 length, visited, type);
2368 }
ec0fa513 2369
0a39fd54 2370 if (type == 2)
2371 {
2372 val = arg;
2373 if (TREE_CODE (val) != INTEGER_CST
2374 || tree_int_cst_sgn (val) < 0)
2375 return false;
2376 }
2377 else
2378 val = c_strlen (arg, 1);
41511585 2379 if (!val)
72648a0e 2380 return false;
e37235f0 2381
0a39fd54 2382 if (*length)
2383 {
2384 if (type > 0)
2385 {
2386 if (TREE_CODE (*length) != INTEGER_CST
2387 || TREE_CODE (val) != INTEGER_CST)
2388 return false;
2389
2390 if (tree_int_cst_lt (*length, val))
2391 *length = val;
2392 return true;
2393 }
2394 else if (simple_cst_equal (val, *length) != 1)
2395 return false;
2396 }
4ee9c684 2397
41511585 2398 *length = val;
2399 return true;
4ee9c684 2400 }
72648a0e 2401
41511585 2402 /* If we were already here, break the infinite cycle. */
2403 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2404 return true;
2405 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2406
2407 var = arg;
2408 def_stmt = SSA_NAME_DEF_STMT (var);
4ee9c684 2409
75a70cf9 2410 switch (gimple_code (def_stmt))
2411 {
2412 case GIMPLE_ASSIGN:
2413 /* The RHS of the statement defining VAR must either have a
2414 constant length or come from another SSA_NAME with a constant
2415 length. */
2416 if (gimple_assign_single_p (def_stmt)
2417 || gimple_assign_unary_nop_p (def_stmt))
2418 {
2419 tree rhs = gimple_assign_rhs1 (def_stmt);
2420 return get_maxval_strlen (rhs, length, visited, type);
2421 }
2422 return false;
2423
2424 case GIMPLE_PHI:
41511585 2425 {
2426 /* All the arguments of the PHI node must have the same constant
2427 length. */
75a70cf9 2428 unsigned i;
2429
2430 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2431 {
2432 tree arg = gimple_phi_arg (def_stmt, i)->def;
2433
2434 /* If this PHI has itself as an argument, we cannot
2435 determine the string length of this argument. However,
2436 if we can find a constant string length for the other
2437 PHI args then we can still be sure that this is a
2438 constant string length. So be optimistic and just
2439 continue with the next argument. */
2440 if (arg == gimple_phi_result (def_stmt))
2441 continue;
2442
2443 if (!get_maxval_strlen (arg, length, visited, type))
2444 return false;
2445 }
2446 }
2447 return true;
4ee9c684 2448
41511585 2449 default:
75a70cf9 2450 return false;
4ee9c684 2451 }
4ee9c684 2452}
2453
2454
75a70cf9 2455/* Fold builtin call in statement STMT. Returns a simplified tree.
2456 We may return a non-constant expression, including another call
2457 to a different function and with different arguments, e.g.,
2458 substituting memcpy for strcpy when the string length is known.
2459 Note that some builtins expand into inline code that may not
2460 be valid in GIMPLE. Callers must take care. */
4ee9c684 2461
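/* E.g. (hypothetical): if get_maxval_strlen below proves that the
   argument of

     x_1 = strlen (s_2)

   always points to "hello", the call folds to the constant 5.  A
   strcpy with a known source length may likewise be rewritten into a
   memcpy by the fold_builtin_* routines invoked further down.  */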
2462static tree
75a70cf9 2463ccp_fold_builtin (gimple stmt)
4ee9c684 2464{
0a39fd54 2465 tree result, val[3];
c2f47e15 2466 tree callee, a;
be9f921e 2467 int arg_idx, type;
f0613857 2468 bitmap visited;
2469 bool ignore;
c2f47e15 2470 int nargs;
4ee9c684 2471
75a70cf9 2472 gcc_assert (is_gimple_call (stmt));
2473
2474 ignore = (gimple_call_lhs (stmt) == NULL);
4ee9c684 2475
2476 /* First try the generic builtin folder. If that succeeds, return the
2477 result directly. */
75a70cf9 2478 result = fold_call_stmt (stmt, ignore);
4ee9c684 2479 if (result)
0a39fd54 2480 {
2481 if (ignore)
2482 STRIP_NOPS (result);
2483 return result;
2484 }
f0613857 2485
2486 /* Ignore MD builtins. */
75a70cf9 2487 callee = gimple_call_fndecl (stmt);
f0613857 2488 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2489 return NULL_TREE;
4ee9c684 2490
2491 /* If the builtin could not be folded, and it has no argument list,
2492 we're done. */
75a70cf9 2493 nargs = gimple_call_num_args (stmt);
c2f47e15 2494 if (nargs == 0)
4ee9c684 2495 return NULL_TREE;
2496
 2497 /* Only do the work for builtins we know how to simplify. */
2498 switch (DECL_FUNCTION_CODE (callee))
2499 {
2500 case BUILT_IN_STRLEN:
2501 case BUILT_IN_FPUTS:
2502 case BUILT_IN_FPUTS_UNLOCKED:
be9f921e 2503 arg_idx = 0;
0a39fd54 2504 type = 0;
4ee9c684 2505 break;
2506 case BUILT_IN_STRCPY:
2507 case BUILT_IN_STRNCPY:
be9f921e 2508 arg_idx = 1;
0a39fd54 2509 type = 0;
2510 break;
2511 case BUILT_IN_MEMCPY_CHK:
2512 case BUILT_IN_MEMPCPY_CHK:
2513 case BUILT_IN_MEMMOVE_CHK:
2514 case BUILT_IN_MEMSET_CHK:
2515 case BUILT_IN_STRNCPY_CHK:
be9f921e 2516 arg_idx = 2;
0a39fd54 2517 type = 2;
2518 break;
2519 case BUILT_IN_STRCPY_CHK:
2520 case BUILT_IN_STPCPY_CHK:
be9f921e 2521 arg_idx = 1;
0a39fd54 2522 type = 1;
2523 break;
2524 case BUILT_IN_SNPRINTF_CHK:
2525 case BUILT_IN_VSNPRINTF_CHK:
be9f921e 2526 arg_idx = 1;
0a39fd54 2527 type = 2;
4ee9c684 2528 break;
2529 default:
2530 return NULL_TREE;
2531 }
2532
54807c88 2533 if (arg_idx >= nargs)
2534 return NULL_TREE;
2535
4ee9c684 2536 /* Try to use the dataflow information gathered by the CCP process. */
27335ffd 2537 visited = BITMAP_ALLOC (NULL);
be9f921e 2538 bitmap_clear (visited);
4ee9c684 2539
0a39fd54 2540 memset (val, 0, sizeof (val));
be9f921e 2541 a = gimple_call_arg (stmt, arg_idx);
2542 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2543 val[arg_idx] = NULL_TREE;
4ee9c684 2544
27335ffd 2545 BITMAP_FREE (visited);
4ee9c684 2546
f0613857 2547 result = NULL_TREE;
4ee9c684 2548 switch (DECL_FUNCTION_CODE (callee))
2549 {
2550 case BUILT_IN_STRLEN:
54807c88 2551 if (val[0] && nargs == 1)
4ee9c684 2552 {
75a70cf9 2553 tree new_val =
2554 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
4ee9c684 2555
2556 /* If the result is not a valid gimple value, or not a cast
 2557 of a valid gimple value, then we cannot use the result. */
f0d6e81c 2558 if (is_gimple_val (new_val)
2559 || (is_gimple_cast (new_val)
2560 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2561 return new_val;
4ee9c684 2562 }
f0613857 2563 break;
2564
4ee9c684 2565 case BUILT_IN_STRCPY:
c2f47e15 2566 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2567 result = fold_builtin_strcpy (callee,
75a70cf9 2568 gimple_call_arg (stmt, 0),
2569 gimple_call_arg (stmt, 1),
c2f47e15 2570 val[1]);
f0613857 2571 break;
2572
4ee9c684 2573 case BUILT_IN_STRNCPY:
c2f47e15 2574 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2575 result = fold_builtin_strncpy (callee,
75a70cf9 2576 gimple_call_arg (stmt, 0),
2577 gimple_call_arg (stmt, 1),
2578 gimple_call_arg (stmt, 2),
c2f47e15 2579 val[1]);
f0613857 2580 break;
2581
4ee9c684 2582 case BUILT_IN_FPUTS:
54807c88 2583 if (nargs == 2)
2584 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2585 gimple_call_arg (stmt, 1),
2586 ignore, false, val[0]);
f0613857 2587 break;
2588
4ee9c684 2589 case BUILT_IN_FPUTS_UNLOCKED:
54807c88 2590 if (nargs == 2)
2591 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2592 gimple_call_arg (stmt, 1),
2593 ignore, true, val[0]);
0a39fd54 2594 break;
2595
2596 case BUILT_IN_MEMCPY_CHK:
2597 case BUILT_IN_MEMPCPY_CHK:
2598 case BUILT_IN_MEMMOVE_CHK:
2599 case BUILT_IN_MEMSET_CHK:
54807c88 2600 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
c2f47e15 2601 result = fold_builtin_memory_chk (callee,
75a70cf9 2602 gimple_call_arg (stmt, 0),
2603 gimple_call_arg (stmt, 1),
2604 gimple_call_arg (stmt, 2),
2605 gimple_call_arg (stmt, 3),
c2f47e15 2606 val[2], ignore,
0a39fd54 2607 DECL_FUNCTION_CODE (callee));
2608 break;
2609
2610 case BUILT_IN_STRCPY_CHK:
2611 case BUILT_IN_STPCPY_CHK:
54807c88 2612 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
c2f47e15 2613 result = fold_builtin_stxcpy_chk (callee,
75a70cf9 2614 gimple_call_arg (stmt, 0),
2615 gimple_call_arg (stmt, 1),
2616 gimple_call_arg (stmt, 2),
c2f47e15 2617 val[1], ignore,
0a39fd54 2618 DECL_FUNCTION_CODE (callee));
2619 break;
2620
2621 case BUILT_IN_STRNCPY_CHK:
54807c88 2622 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
75a70cf9 2623 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2624 gimple_call_arg (stmt, 1),
2625 gimple_call_arg (stmt, 2),
2626 gimple_call_arg (stmt, 3),
c2f47e15 2627 val[2]);
0a39fd54 2628 break;
2629
2630 case BUILT_IN_SNPRINTF_CHK:
2631 case BUILT_IN_VSNPRINTF_CHK:
2632 if (val[1] && is_gimple_val (val[1]))
75a70cf9 2633 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2634 DECL_FUNCTION_CODE (callee));
f0613857 2635 break;
4ee9c684 2636
2637 default:
8c0963c4 2638 gcc_unreachable ();
4ee9c684 2639 }
2640
f0613857 2641 if (result && ignore)
db97ad41 2642 result = fold_ignored_result (result);
f0613857 2643 return result;
4ee9c684 2644}
2645
75a70cf9 2646/* Attempt to fold the assignment statement pointed to by SI. Returns a
2647 replacement rhs for the statement or NULL_TREE if no simplification
2648 could be made. It is assumed that the operands have been previously
2649 folded. */
2650
2651static tree
2652fold_gimple_assign (gimple_stmt_iterator *si)
2653{
2654 gimple stmt = gsi_stmt (*si);
2655 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2656
2657 tree result = NULL;
2658
2659 switch (get_gimple_rhs_class (subcode))
2660 {
2661 case GIMPLE_SINGLE_RHS:
2662 {
2663 tree rhs = gimple_assign_rhs1 (stmt);
2664
2665 /* Try to fold a conditional expression. */
2666 if (TREE_CODE (rhs) == COND_EXPR)
2667 {
2668 tree temp = fold (COND_EXPR_COND (rhs));
2669 if (temp != COND_EXPR_COND (rhs))
2670 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2671 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2672 }
2673
2674 /* If we couldn't fold the RHS, hand over to the generic
2675 fold routines. */
2676 if (result == NULL_TREE)
2677 result = fold (rhs);
2678
2679 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2680 that may have been added by fold, and "useless" type
2681 conversions that might now be apparent due to propagation. */
2682 STRIP_USELESS_TYPE_CONVERSION (result);
2683
2684 if (result != rhs && valid_gimple_rhs_p (result))
2685 return result;
2686 else
2687 /* It is possible that fold_stmt_r simplified the RHS.
2688 Make sure that the subcode of this statement still
2689 reflects the principal operator of the rhs operand. */
2690 return rhs;
2691 }
2692 break;
2693
2694 case GIMPLE_UNARY_RHS:
f1fb2997 2695 {
2696 tree rhs = gimple_assign_rhs1 (stmt);
75a70cf9 2697
f1fb2997 2698 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2699 if (result)
2700 {
2701 /* If the operation was a conversion do _not_ mark a
2702 resulting constant with TREE_OVERFLOW if the original
2703 constant was not. These conversions have implementation
2704 defined behavior and retaining the TREE_OVERFLOW flag
2705 here would confuse later passes such as VRP. */
2706 if (CONVERT_EXPR_CODE_P (subcode)
2707 && TREE_CODE (result) == INTEGER_CST
2708 && TREE_CODE (rhs) == INTEGER_CST)
2709 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2710
2711 STRIP_USELESS_TYPE_CONVERSION (result);
2712 if (valid_gimple_rhs_p (result))
2713 return result;
2714 }
2715 else if (CONVERT_EXPR_CODE_P (subcode)
2716 && POINTER_TYPE_P (gimple_expr_type (stmt))
2717 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2718 {
2719 tree type = gimple_expr_type (stmt);
2720 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2721 integer_zero_node, type);
2722 if (t)
2723 return t;
2724 }
2725 }
75a70cf9 2726 break;
2727
2728 case GIMPLE_BINARY_RHS:
2729 /* Try to fold pointer addition. */
2730 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
93f3673b 2731 {
2732 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2733 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2734 {
2735 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2736 if (!useless_type_conversion_p
2737 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2738 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2739 }
2740 result = maybe_fold_stmt_addition (type,
2741 gimple_assign_rhs1 (stmt),
2742 gimple_assign_rhs2 (stmt));
2743 }
75a70cf9 2744
2745 if (!result)
2746 result = fold_binary (subcode,
2747 TREE_TYPE (gimple_assign_lhs (stmt)),
2748 gimple_assign_rhs1 (stmt),
2749 gimple_assign_rhs2 (stmt));
2750
2751 if (result)
2752 {
2753 STRIP_USELESS_TYPE_CONVERSION (result);
2754 if (valid_gimple_rhs_p (result))
2755 return result;
ec3753d0 2756
 2757 /* Fold might have produced non-GIMPLE, so if we trust it blindly
 2758 we lose canonicalization opportunities. Do not go through fold
 2759 again here, though, or the same non-GIMPLE will be
 2760 produced. */
2761 if (commutative_tree_code (subcode)
2762 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2763 gimple_assign_rhs2 (stmt), false))
2764 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2765 gimple_assign_rhs2 (stmt),
2766 gimple_assign_rhs1 (stmt));
75a70cf9 2767 }
2768 break;
2769
2770 case GIMPLE_INVALID_RHS:
2771 gcc_unreachable ();
2772 }
2773
2774 return NULL_TREE;
2775}
2776
2777/* Attempt to fold a conditional statement. Return true if any changes were
2778 made. We only attempt to fold the condition expression, and do not perform
2779 any transformation that would require alteration of the cfg. It is
2780 assumed that the operands have been previously folded. */
2781
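/* E.g. (hypothetical): a condition such as if (x_1 > x_1) may fold to
   if (0); only the condition is rewritten, and the now-dead outgoing
   edge is left for CFG cleanup.  */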
2782static bool
2783fold_gimple_cond (gimple stmt)
2784{
2785 tree result = fold_binary (gimple_cond_code (stmt),
2786 boolean_type_node,
2787 gimple_cond_lhs (stmt),
2788 gimple_cond_rhs (stmt));
2789
2790 if (result)
2791 {
2792 STRIP_USELESS_TYPE_CONVERSION (result);
2793 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2794 {
2795 gimple_cond_set_condition_from_tree (stmt, result);
2796 return true;
2797 }
2798 }
2799
2800 return false;
2801}
2802
2803
2804/* Attempt to fold a call statement referenced by the statement iterator GSI.
2805 The statement may be replaced by another statement, e.g., if the call
2806 simplifies to a constant value. Return true if any changes were made.
2807 It is assumed that the operands have been previously folded. */
2808
2809static bool
2810fold_gimple_call (gimple_stmt_iterator *gsi)
2811{
2812 gimple stmt = gsi_stmt (*gsi);
2813
2814 tree callee = gimple_call_fndecl (stmt);
2815
2816 /* Check for builtins that CCP can handle using information not
2817 available in the generic fold routines. */
2818 if (callee && DECL_BUILT_IN (callee))
2819 {
2820 tree result = ccp_fold_builtin (stmt);
2821
2822 if (result)
2823 return update_call_from_tree (gsi, result);
2824 }
2825 else
2826 {
 2827 /* Check for a resolvable OBJ_TYPE_REF. The only cases we can resolve
 2828 here are those where we've propagated the address of a decl into
 2829 the object slot. */
2830 /* ??? Should perhaps do this in fold proper. However, doing it
2831 there requires that we create a new CALL_EXPR, and that requires
2832 copying EH region info to the new node. Easier to just do it
2833 here where we can just smash the call operand. */
2834 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2835 callee = gimple_call_fn (stmt);
2836 if (TREE_CODE (callee) == OBJ_TYPE_REF
2837 && lang_hooks.fold_obj_type_ref
2838 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2839 && DECL_P (TREE_OPERAND
2840 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2841 {
2842 tree t;
2843
2844 /* ??? Caution: Broken ADDR_EXPR semantics means that
2845 looking at the type of the operand of the addr_expr
2846 can yield an array type. See silly exception in
2847 check_pointer_types_r. */
2848 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2849 t = lang_hooks.fold_obj_type_ref (callee, t);
2850 if (t)
2851 {
2852 gimple_call_set_fn (stmt, t);
2853 return true;
2854 }
2855 }
2856 }
2857
2858 return false;
2859}
4ee9c684 2860
75a70cf9 2861/* Fold the statement pointed to by GSI. In some cases, this function may
41511585 2862 replace the whole statement with a new one. Returns true iff folding
2863 makes any changes. */
4ee9c684 2864
41511585 2865bool
75a70cf9 2866fold_stmt (gimple_stmt_iterator *gsi)
4ee9c684 2867{
75a70cf9 2868 tree res;
0d759d9c 2869 struct fold_stmt_r_data fold_stmt_r_data;
75a70cf9 2870 struct walk_stmt_info wi;
2871
41511585 2872 bool changed = false;
0d759d9c 2873 bool inside_addr_expr = false;
2874
75a70cf9 2875 gimple stmt = gsi_stmt (*gsi);
add6ee5e 2876
2877 fold_stmt_r_data.stmt = stmt;
0d759d9c 2878 fold_stmt_r_data.changed_p = &changed;
2879 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
4ee9c684 2880
75a70cf9 2881 memset (&wi, 0, sizeof (wi));
2882 wi.info = &fold_stmt_r_data;
4ee9c684 2883
75a70cf9 2884 /* Fold the individual operands.
2885 For example, fold instances of *&VAR into VAR, etc. */
2886 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2887 gcc_assert (!res);
4ee9c684 2888
75a70cf9 2889 /* Fold the main computation performed by the statement. */
2890 switch (gimple_code (stmt))
4ee9c684 2891 {
75a70cf9 2892 case GIMPLE_ASSIGN:
2893 {
2894 tree new_rhs = fold_gimple_assign (gsi);
2895 if (new_rhs != NULL_TREE)
2896 {
2897 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2898 changed = true;
2899 }
2900 stmt = gsi_stmt (*gsi);
2901 break;
2902 }
2903 case GIMPLE_COND:
2904 changed |= fold_gimple_cond (stmt);
2905 break;
2906 case GIMPLE_CALL:
2907 /* The entire statement may be replaced in this case. */
2908 changed |= fold_gimple_call (gsi);
2909 break;
e77b8618 2910
75a70cf9 2911 default:
2912 return changed;
ec0fa513 2914 }
4ee9c684 2915
41511585 2916 return changed;
4ee9c684 2917}
2918
8171a1dd 2919/* Perform the minimal folding on statement STMT. Only operations like
2920 *&x created by constant propagation are handled. The statement cannot
75a70cf9 2921 be replaced with a new one. Return true if the statement was
2922 changed, false otherwise. */
8171a1dd 2923
2924bool
75a70cf9 2925fold_stmt_inplace (gimple stmt)
8171a1dd 2926{
75a70cf9 2927 tree res;
0d759d9c 2928 struct fold_stmt_r_data fold_stmt_r_data;
75a70cf9 2929 struct walk_stmt_info wi;
2930 gimple_stmt_iterator si;
2931
8171a1dd 2932 bool changed = false;
0d759d9c 2933 bool inside_addr_expr = false;
2934
add6ee5e 2935 fold_stmt_r_data.stmt = stmt;
0d759d9c 2936 fold_stmt_r_data.changed_p = &changed;
2937 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
8171a1dd 2938
75a70cf9 2939 memset (&wi, 0, sizeof (wi));
2940 wi.info = &fold_stmt_r_data;
2941
2942 /* Fold the individual operands.
2943 For example, fold instances of *&VAR into VAR, etc.
8171a1dd 2944
75a70cf9 2945 It appears that, at one time, maybe_fold_stmt_indirect
2946 would cause the walk to return non-null in order to
2947 signal that the entire statement should be replaced with
 2948 a call to __builtin_trap. This functionality is currently
2949 disabled, as noted in a FIXME, and cannot be supported here. */
2950 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2951 gcc_assert (!res);
8171a1dd 2952
75a70cf9 2953 /* Fold the main computation performed by the statement. */
2954 switch (gimple_code (stmt))
2955 {
2956 case GIMPLE_ASSIGN:
2957 {
2958 unsigned old_num_ops;
2959 tree new_rhs;
2960 old_num_ops = gimple_num_ops (stmt);
2961 si = gsi_for_stmt (stmt);
2962 new_rhs = fold_gimple_assign (&si);
2963 if (new_rhs != NULL_TREE
2964 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2965 {
2966 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2967 changed = true;
2968 }
2969 gcc_assert (gsi_stmt (si) == stmt);
2970 break;
2971 }
2972 case GIMPLE_COND:
2973 changed |= fold_gimple_cond (stmt);
2974 break;
8171a1dd 2975
75a70cf9 2976 default:
2977 break;
2978 }
8171a1dd 2979
2980 return changed;
2981}
75a70cf9 2982
bdd0e199 2983/* Try to optimize out __builtin_stack_restore. Optimize it out
2984 if there is another __builtin_stack_restore in the same basic
2985 block and no calls or ASM_EXPRs are in between, or if this block's
2986 only outgoing edge is to EXIT_BLOCK and there are no calls or
2987 ASM_EXPRs after this __builtin_stack_restore. */
2988
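/* A hypothetical shape this handles:

     t_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (t_1);    <-- redundant
     ...  (no calls or ASM_EXPRs)
     __builtin_stack_restore (t_2);

   the first restore is deleted and the matching stack save is turned
   into a trivial assignment of zero.  */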
2989static tree
75a70cf9 2990optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2991{
75a70cf9 2992 tree callee, rhs;
2993 gimple stmt, stack_save;
2994 gimple_stmt_iterator stack_save_gsi;
2995
2996 basic_block bb = gsi_bb (i);
2997 gimple call = gsi_stmt (i);
bdd0e199 2998
75a70cf9 2999 if (gimple_code (call) != GIMPLE_CALL
3000 || gimple_call_num_args (call) != 1
3001 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3002 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 3003 return NULL_TREE;
3004
75a70cf9 3005 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 3006 {
75a70cf9 3007 stmt = gsi_stmt (i);
3008 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 3009 return NULL_TREE;
75a70cf9 3010 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 3011 continue;
3012
75a70cf9 3013 callee = gimple_call_fndecl (stmt);
bdd0e199 3014 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3015 return NULL_TREE;
3016
3017 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3018 break;
3019 }
3020
75a70cf9 3021 if (gsi_end_p (i)
bdd0e199 3022 && (! single_succ_p (bb)
3023 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3024 return NULL_TREE;
3025
75a70cf9 3026 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3027 if (gimple_code (stack_save) != GIMPLE_CALL
3028 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3029 || stmt_could_throw_p (stack_save)
3030 || !has_single_use (gimple_call_arg (call, 0)))
bdd0e199 3031 return NULL_TREE;
3032
75a70cf9 3033 callee = gimple_call_fndecl (stack_save);
bdd0e199 3034 if (!callee
3035 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3036 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
75a70cf9 3037 || gimple_call_num_args (stack_save) != 0)
bdd0e199 3038 return NULL_TREE;
3039
75a70cf9 3040 stack_save_gsi = gsi_for_stmt (stack_save);
3041 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3042 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3043 if (!update_call_from_tree (&stack_save_gsi, rhs))
bdd0e199 3044 {
75a70cf9 3045 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
bdd0e199 3046 return NULL_TREE;
3047 }
75a70cf9 3048 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
bdd0e199 3049
75a70cf9 3050 /* No effect, so the statement will be deleted. */
bdd0e199 3051 return integer_zero_node;
3052}
75a70cf9 3053
8a58ed0a 3054/* If the va_list type is a simple pointer and nothing special is needed,
 3055 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 3056 optimize __builtin_va_end (&ap) away as a no-op, and turn
 3057 __builtin_va_copy into a simple pointer assignment. */
3058
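/* E.g. (hypothetical, on a target whose va_list is a plain pointer):

     __builtin_va_start (&ap, 0)  becomes  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s)    becomes  d = s;
     __builtin_va_end (&ap)       is deleted outright.  */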
3059static tree
75a70cf9 3060optimize_stdarg_builtin (gimple call)
8a58ed0a 3061{
5f57a8b1 3062 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 3063 bool va_list_simple_ptr;
3064
75a70cf9 3065 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 3066 return NULL_TREE;
3067
75a70cf9 3068 callee = gimple_call_fndecl (call);
5f57a8b1 3069
3070 cfun_va_list = targetm.fn_abi_va_list (callee);
3071 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3072 && (TREE_TYPE (cfun_va_list) == void_type_node
3073 || TREE_TYPE (cfun_va_list) == char_type_node);
3074
8a58ed0a 3075 switch (DECL_FUNCTION_CODE (callee))
3076 {
3077 case BUILT_IN_VA_START:
3078 if (!va_list_simple_ptr
3079 || targetm.expand_builtin_va_start != NULL
75a70cf9 3080 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
8a58ed0a 3081 return NULL_TREE;
3082
75a70cf9 3083 if (gimple_call_num_args (call) != 2)
8a58ed0a 3084 return NULL_TREE;
3085
75a70cf9 3086 lhs = gimple_call_arg (call, 0);
8a58ed0a 3087 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3088 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 3089 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3090 return NULL_TREE;
75a70cf9 3091
8a58ed0a 3092 lhs = build_fold_indirect_ref (lhs);
3093 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
75a70cf9 3094 1, integer_zero_node);
8a58ed0a 3095 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3096 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3097
3098 case BUILT_IN_VA_COPY:
3099 if (!va_list_simple_ptr)
3100 return NULL_TREE;
3101
75a70cf9 3102 if (gimple_call_num_args (call) != 2)
8a58ed0a 3103 return NULL_TREE;
3104
75a70cf9 3105 lhs = gimple_call_arg (call, 0);
8a58ed0a 3106 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3107 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 3108 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3109 return NULL_TREE;
3110
3111 lhs = build_fold_indirect_ref (lhs);
75a70cf9 3112 rhs = gimple_call_arg (call, 1);
8a58ed0a 3113 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 3114 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3115 return NULL_TREE;
3116
3117 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3118 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3119
3120 case BUILT_IN_VA_END:
75a70cf9 3121 /* No effect, so the statement will be deleted. */
8a58ed0a 3122 return integer_zero_node;
3123
3124 default:
3125 gcc_unreachable ();
3126 }
3127}
75a70cf9 3128
909e5ecb 3129/* Convert EXPR into a GIMPLE value suitable for substitution on the
3130 RHS of an assignment. Insert the necessary statements before
75a70cf9 3131 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
 3132 is replaced. If the call is expected to produce a result, then it
3133 is replaced by an assignment of the new RHS to the result variable.
3134 If the result is to be ignored, then the call is replaced by a
3135 GIMPLE_NOP. */
909e5ecb 3136
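/* E.g. (hypothetical): replacing the call in

     x_1 = foo ();

   with the expression a_2 + 1 gimplifies that expression into a
   temporary, inserts the new statements before the call, and turns
   the call itself into the assignment x_1 = tmp.  */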
75a70cf9 3137static void
3138gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
909e5ecb 3139{
75a70cf9 3140 tree lhs;
3141 tree tmp = NULL_TREE; /* Silence warning. */
3142 gimple stmt, new_stmt;
3143 gimple_stmt_iterator i;
 3144 gimple_seq stmts = gimple_seq_alloc ();
dac18d1a 3145 struct gimplify_ctx gctx;
909e5ecb 3146
75a70cf9 3147 stmt = gsi_stmt (*si_p);
3148
3149 gcc_assert (is_gimple_call (stmt));
3150
3151 lhs = gimple_call_lhs (stmt);
3152
dac18d1a 3153 push_gimplify_context (&gctx);
75a70cf9 3154
3155 if (lhs == NULL_TREE)
3156 gimplify_and_add (expr, &stmts);
3157 else
a280136a 3158 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
75a70cf9 3159
909e5ecb 3160 pop_gimplify_context (NULL);
3161
75a70cf9 3162 if (gimple_has_location (stmt))
3163 annotate_all_with_location (stmts, gimple_location (stmt));
b66731e8 3164
909e5ecb 3165 /* The replacement can expose previously unreferenced variables. */
75a70cf9 3166 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3167 {
3168 new_stmt = gsi_stmt (i);
3169 find_new_referenced_vars (new_stmt);
3170 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3171 mark_symbols_for_renaming (new_stmt);
3172 gsi_next (si_p);
3173 }
3174
3175 if (lhs == NULL_TREE)
dd277d48 3176 {
3177 new_stmt = gimple_build_nop ();
3178 unlink_stmt_vdef (stmt);
3179 release_defs (stmt);
3180 }
75a70cf9 3181 else
909e5ecb 3182 {
75a70cf9 3183 new_stmt = gimple_build_assign (lhs, tmp);
dd277d48 3184 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3185 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
75a70cf9 3186 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
909e5ecb 3187 }
3188
75a70cf9 3189 gimple_set_location (new_stmt, gimple_location (stmt));
3190 gsi_replace (si_p, new_stmt, false);
909e5ecb 3191}
3192
4ee9c684 3193/* A simple pass that attempts to fold all builtin functions. This pass
3194 is run after we've propagated as many constants as we can. */
3195
2a1990e9 3196static unsigned int
4ee9c684 3197execute_fold_all_builtins (void)
3198{
b36237eb 3199 bool cfg_changed = false;
4ee9c684 3200 basic_block bb;
b1b7c0c4 3201 unsigned int todoflags = 0;
3202
4ee9c684 3203 FOR_EACH_BB (bb)
3204 {
75a70cf9 3205 gimple_stmt_iterator i;
3206 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 3207 {
75a70cf9 3208 gimple stmt, old_stmt;
4ee9c684 3209 tree callee, result;
0a39fd54 3210 enum built_in_function fcode;
4ee9c684 3211
75a70cf9 3212 stmt = gsi_stmt (i);
3213
3214 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3215 {
75a70cf9 3216 gsi_next (&i);
0a39fd54 3217 continue;
3218 }
75a70cf9 3219 callee = gimple_call_fndecl (stmt);
4ee9c684 3220 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 3221 {
75a70cf9 3222 gsi_next (&i);
0a39fd54 3223 continue;
3224 }
3225 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 3226
75a70cf9 3227 result = ccp_fold_builtin (stmt);
5a4b7e1e 3228
3229 if (result)
75a70cf9 3230 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 3231
4ee9c684 3232 if (!result)
3233 switch (DECL_FUNCTION_CODE (callee))
3234 {
3235 case BUILT_IN_CONSTANT_P:
3236 /* Resolve __builtin_constant_p. If it hasn't been
3237 folded to integer_one_node by now, it's fairly
3238 certain that the value simply isn't constant. */
75a70cf9 3239 result = integer_zero_node;
4ee9c684 3240 break;
3241
bdd0e199 3242 case BUILT_IN_STACK_RESTORE:
75a70cf9 3243 result = optimize_stack_restore (i);
8a58ed0a 3244 if (result)
3245 break;
75a70cf9 3246 gsi_next (&i);
8a58ed0a 3247 continue;
3248
3249 case BUILT_IN_VA_START:
3250 case BUILT_IN_VA_END:
3251 case BUILT_IN_VA_COPY:
3252 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 3253 result = optimize_stdarg_builtin (stmt);
bdd0e199 3254 if (result)
3255 break;
3256 /* FALLTHRU */
3257
4ee9c684 3258 default:
75a70cf9 3259 gsi_next (&i);
4ee9c684 3260 continue;
3261 }
3262
3263 if (dump_file && (dump_flags & TDF_DETAILS))
3264 {
3265 fprintf (dump_file, "Simplified\n ");
75a70cf9 3266 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3267 }
3268
75a70cf9 3269 old_stmt = stmt;
3270 push_stmt_changes (gsi_stmt_ptr (&i));
de6ed584 3271
75a70cf9 3272 if (!update_call_from_tree (&i, result))
dd277d48 3273 gimplify_and_update_call_from_tree (&i, result);
de6ed584 3274
75a70cf9 3275 stmt = gsi_stmt (i);
3276 pop_stmt_changes (gsi_stmt_ptr (&i));
de6ed584 3277
75a70cf9 3278 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3279 && gimple_purge_dead_eh_edges (bb))
b36237eb 3280 cfg_changed = true;
4ee9c684 3281
3282 if (dump_file && (dump_flags & TDF_DETAILS))
3283 {
3284 fprintf (dump_file, "to\n ");
75a70cf9 3285 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3286 fprintf (dump_file, "\n");
3287 }
0a39fd54 3288
3289 /* Retry the same statement if it changed into another
 3290 builtin; there might be new opportunities now. */
75a70cf9 3291 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3292 {
75a70cf9 3293 gsi_next (&i);
0a39fd54 3294 continue;
3295 }
75a70cf9 3296 callee = gimple_call_fndecl (stmt);
0a39fd54 3297 if (!callee
75a70cf9 3298 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 3299 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 3300 gsi_next (&i);
4ee9c684 3301 }
3302 }
b1b7c0c4 3303
b36237eb 3304 /* Delete unreachable blocks. */
b1b7c0c4 3305 if (cfg_changed)
3306 todoflags |= TODO_cleanup_cfg;
3307
3308 return todoflags;
4ee9c684 3309}
3310
41511585 3311
20099e35 3312struct gimple_opt_pass pass_fold_builtins =
4ee9c684 3313{
20099e35 3314 {
3315 GIMPLE_PASS,
4ee9c684 3316 "fab", /* name */
3317 NULL, /* gate */
3318 execute_fold_all_builtins, /* execute */
3319 NULL, /* sub */
3320 NULL, /* next */
3321 0, /* static_pass_number */
3322 0, /* tv_id */
49290934 3323 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 3324 0, /* properties_provided */
3325 0, /* properties_destroyed */
3326 0, /* todo_flags_start */
909e5ecb 3327 TODO_dump_func
3328 | TODO_verify_ssa
20099e35 3329 | TODO_update_ssa /* todo_flags_finish */
3330 }
4ee9c684 3331};