/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED  ->  the initial state of the value.  This value
                           is replaced with a correct initial value
                           the first time the value is used, so the
                           rest of the pass does not need to care about
                           it.  Using this value simplifies initialization
                           of the pass, and prevents us from needlessly
                           scanning statements that are never reached.

        UNDEFINED      ->  V_i is a local variable whose definition
                           has not been processed yet.  Therefore we
                           don't yet know if its value is a constant
                           or not.

        CONSTANT       ->  V_i has been found to hold a constant
                           value C.

        VARYING        ->  V_i cannot take a constant value, or if it
                           does, it is not possible to determine it
                           at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

        # a_5 = VDEF <a_4>
        a.a = 2;

        # VUSE <a_5>
        x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
#include "dbgcnt.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            {
              if (TREE_CODE (val) == ADDR_EXPR)
                {
                  tree base = get_base_address (TREE_OPERAND (val, 0));
                  if (base && TREE_CODE (base) == VAR_DECL)
                    add_referenced_var (base);
                }
              return val;
            }
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
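
/* For illustration only (an added example, not part of the original
   sources): given a translation unit containing

        static const int limit = 42;

   get_symbol_constant_value (limit) returns the INTEGER_CST 42.  A
   'const' declared without an initializer that is not DECL_EXTERNAL
   and binds locally folds to zero instead, per the comment above.  */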

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
        val.lattice_val = UNDEFINED;
      else
        val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
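
/* A worked example of the rules above (an added illustration, not from
   the original sources): for an SSA name i_1 of an uninitialized local
   'int i', the defining statement is a GIMPLE_NOP and the default
   value is UNDEFINED; for n_1 of a PARM_DECL 'int n' it is VARYING,
   since the caller may pass anything.  */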

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore the lhs, the fndecl and the static chain;
     otherwise only the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR, make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED,
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- a load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
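
/* Illustrative examples for likely_value (added; not from the original
   sources): for 'x_2 = y_1 + z_3' with y_1 UNDEFINED and z_3 CONSTANT,
   PLUS_EXPR appears in the switch above, so the result is likely
   UNDEFINED; for a shift 'x_2 = z_3 >> y_1' under the same lattice
   values the function falls back to VARYING, because the undefined
   operand may be promoted.  Any volatile operand forces VARYING.  */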

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to a function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
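
/* For example (added illustration, not from the original sources): a
   call 'foo (x_1);' with no LHS is surely varying, as is a store
   through a pointer '*p_2 = x_1;' which creates a VDEF; an assignment
   'x_2 = __builtin_labs (y_1);' is not, since calls to builtins with
   an LHS may still fold to constants.  */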

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING
   when the total number of ssa names analyzed is beyond the debug
   count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].value = NULL_TREE;
        }
    }
}
700
4ee9c684 701
88dbf20f 702/* Do final substitution of propagated values, cleanup the flowgraph and
33a34f1e 703 free allocated storage.
4ee9c684 704
33a34f1e 705 Return TRUE when something was optimized. */
706
707static bool
88dbf20f 708ccp_finalize (void)
4ee9c684 709{
43fb76c1 710 bool something_changed;
711
712 do_dbg_cnt ();
88dbf20f 713 /* Perform substitutions based on the known constant values. */
43fb76c1 714 something_changed = substitute_and_fold (const_val, false);
4ee9c684 715
88dbf20f 716 free (const_val);
e004838d 717 const_val = NULL;
33a34f1e 718 return something_changed;;
4ee9c684 719}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any M UNDEFINED   = any
                any M VARYING     = VARYING
                Ci  M Cj          = Ci       if (i == j)
                Ci  M Cj          = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         VAL1 already contains the value we want.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
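
/* A worked example of the meet rules (added illustration, not from the
   original sources): meeting CONSTANT 3 with UNDEFINED yields
   CONSTANT 3; meeting CONSTANT 3 with CONSTANT 3 yields CONSTANT 3;
   meeting CONSTANT 3 with CONSTANT 100 yields VARYING, as does
   meeting anything with VARYING.  */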


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet on all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
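
/* For example (added illustration, not from the original sources): if
   p_1 = &a with 'int a' and the dereference '*p_1' also has type
   'int', propagation yields plain 'a'; it is refused if 'a' has
   incomplete type or if the conversion sequence is not useless.  */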

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address
                 operations.  The folding we do here matches that in
                 tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && may_propagate_address_into_dereference
                               (val->value, *base))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, list;

                  list = NULL_TREE;
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      if (TREE_CODE (val) == SSA_NAME
                          && get_value (val)->lattice_val == CONSTANT)
                        val = get_value (val)->value;
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        list = tree_cons (NULL_TREE, val, list);
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), nreverse (list));
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary (TREE_CODE (rhs),
                                           TREE_TYPE (rhs), val->value);
                    }
                  else if (TREE_CODE (rhs) == INDIRECT_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && useless_type_conversion_p (TREE_TYPE (rhs),
                                TREE_TYPE (TREE_TYPE (val->value))))
                        rhs = TREE_OPERAND (val->value, 0);
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type
                 conversions do not apply here.  Substitution later will
                 only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                           (gimple_location (stmt),
                            op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return fold_unary_ignore_overflow (subcode,
                                                 gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address
                    (gimple_location (stmt), op0, op1, TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant
               value, if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
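
/* Tying the cases above together (added illustration, not from the
   original sources): with x_1 known CONSTANT 2, ccp_fold simplifies
   the GIMPLE_ASSIGN 'y_2 = x_1 * 3' to 6 via fold_binary, and the
   GIMPLE_COND 'if (x_1 < 4)' to boolean true, which later lets
   visit_cond_stmt mark only one outgoing edge executable.  */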


/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR
            && useless_type_conversion_p (TREE_TYPE (t),
                                          TREE_TYPE (TREE_TYPE (value->value))))
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
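
/* For example (added illustration, not from the original sources),
   given

        static const int tbl[3] = { 10, 20, 30 };

   a load 'x_1 = tbl[2]' folds to 30, and the same applies when the
   index is an SSA name whose lattice value is CONSTANT.  A read such
   as '"abc"[1]' folds to the character constant 'b' through the
   STRING_CST path above.  */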

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect  /* todo_flags_finish */
 }
};
4ee9c684 1644
4ee9c684 1645
304557cd 1646/* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
4ee9c684 1647 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
e60a6f7b 1648 is the desired result type.
1649
1650 LOC is the location of the original expression. */
4ee9c684 1651
1652static tree
e60a6f7b 1653maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
1654 tree orig_type,
b7488229 1655 bool allow_negative_idx)
4ee9c684 1656{
e71da05f 1657 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
6374121b 1658 tree array_type, elt_type, elt_size;
b513c084 1659 tree domain_type;
6374121b 1660
1661 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1662 measured in units of the size of elements type) from that ARRAY_REF).
1663 We can't do anything if either is variable.
1664
1665 The case we handle here is *(&A[N]+O). */
1666 if (TREE_CODE (base) == ARRAY_REF)
1667 {
1668 tree low_bound = array_ref_low_bound (base);
1669
1670 elt_offset = TREE_OPERAND (base, 1);
1671 if (TREE_CODE (low_bound) != INTEGER_CST
1672 || TREE_CODE (elt_offset) != INTEGER_CST)
1673 return NULL_TREE;
1674
1675 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1676 base = TREE_OPERAND (base, 0);
1677 }
4ee9c684 1678
1679 /* Ignore stupid user tricks of indexing non-array variables. */
1680 array_type = TREE_TYPE (base);
1681 if (TREE_CODE (array_type) != ARRAY_TYPE)
1682 return NULL_TREE;
1683 elt_type = TREE_TYPE (array_type);
c8ca3ee7 1684 if (!useless_type_conversion_p (orig_type, elt_type))
4ee9c684 1685 return NULL_TREE;
e71da05f 1686
1687 /* Use signed size type for intermediate computation on the index. */
1688 idx_type = signed_type_for (size_type_node);
1689
6374121b 1690 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1691 element type (so we can use the alignment if it's not constant).
1692 Otherwise, compute the offset as an index by using a division. If the
1693 division isn't exact, then don't do anything. */
4ee9c684 1694 elt_size = TYPE_SIZE_UNIT (elt_type);
3b45913d 1695 if (!elt_size)
1696 return NULL;
6374121b 1697 if (integer_zerop (offset))
1698 {
1699 if (TREE_CODE (elt_size) != INTEGER_CST)
1700 elt_size = size_int (TYPE_ALIGN (elt_type));
4ee9c684 1701
e71da05f 1702 idx = build_int_cst (idx_type, 0);
6374121b 1703 }
1704 else
1705 {
1706 unsigned HOST_WIDE_INT lquo, lrem;
1707 HOST_WIDE_INT hquo, hrem;
e71da05f 1708 double_int soffset;
6374121b 1709
e71da05f 1710 /* The final array offset should be signed, so we need
1711 to sign-extend the (possibly pointer) offset here
1712 and use signed division. */
1713 soffset = double_int_sext (tree_to_double_int (offset),
1714 TYPE_PRECISION (TREE_TYPE (offset)));
6374121b 1715 if (TREE_CODE (elt_size) != INTEGER_CST
e71da05f 1716 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1717 soffset.low, soffset.high,
6374121b 1718 TREE_INT_CST_LOW (elt_size),
1719 TREE_INT_CST_HIGH (elt_size),
1720 &lquo, &hquo, &lrem, &hrem)
1721 || lrem || hrem)
1722 return NULL_TREE;
4ee9c684 1723
e71da05f 1724 idx = build_int_cst_wide (idx_type, lquo, hquo);
6374121b 1725 }
1726
1727 /* Assume the low bound is zero. If there is a domain type, get the
1728 low bound, if any, convert the index into that type, and add the
1729 low bound. */
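 /* Editor's note (hypothetical, non-C front ends): an Ada array with
 domain 1 .. 10 has TYPE_MIN_VALUE 1, so a byte offset of 8 over
 4-byte elements gives raw index 2, rebased below to 1 + 2 = 3. */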
e71da05f 1730 min_idx = build_int_cst (idx_type, 0);
b513c084 1731 domain_type = TYPE_DOMAIN (array_type);
1732 if (domain_type)
4ee9c684 1733 {
b513c084 1734 idx_type = domain_type;
e71da05f 1735 if (TYPE_MIN_VALUE (idx_type))
1736 min_idx = TYPE_MIN_VALUE (idx_type);
6374121b 1737 else
e71da05f 1738 min_idx = fold_convert (idx_type, min_idx);
6374121b 1739
1740 if (TREE_CODE (min_idx) != INTEGER_CST)
1741 return NULL_TREE;
1742
e71da05f 1743 elt_offset = fold_convert (idx_type, elt_offset);
4ee9c684 1744 }
1745
6374121b 1746 if (!integer_zerop (min_idx))
1747 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1748 if (!integer_zerop (elt_offset))
1749 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1750
e71da05f 1751 /* Convert to the index type last, so that truncation happens after offsetting. */
1752 idx = fold_convert (idx_type, idx);
1753
b513c084 1754 /* We don't want to construct an access past the array bounds. For example
b7488229 1755 char *(c[4]);
1756 c[3][2];
1757 should not be simplified into (*c)[14] or tree-vrp will
1758 give false warnings. The same is true for
1759 struct A { long x; char d[0]; } *a;
1760 (char *)a - 4;
 1761 which should not be folded to &a->d[-8]. */
1762 if (domain_type
1763 && TYPE_MAX_VALUE (domain_type)
b513c084 1764 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1765 {
1766 tree up_bound = TYPE_MAX_VALUE (domain_type);
1767
1768 if (tree_int_cst_lt (up_bound, idx)
1769 /* Accesses after the end of arrays of size 0 (gcc
1770 extension) and 1 are likely intentional ("struct
1771 hack"). */
1772 && compare_tree_int (up_bound, 1) > 0)
1773 return NULL_TREE;
1774 }
b7488229 1775 if (domain_type
1776 && TYPE_MIN_VALUE (domain_type))
1777 {
1778 if (!allow_negative_idx
1779 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1780 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1781 return NULL_TREE;
1782 }
1783 else if (!allow_negative_idx
1784 && compare_tree_int (idx, 0) < 0)
1785 return NULL_TREE;
b513c084 1786
e60a6f7b 1787 {
1788 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1789 SET_EXPR_LOCATION (t, loc);
1790 return t;
1791 }
4ee9c684 1792}
1793
41511585 1794
3b45913d 1795/* Attempt to fold *(S+O) to S.X.
4ee9c684 1796 RECORD_TYPE is the type of the record S. BASE is the record itself,
e60a6f7b 1797 or a pointer to it if BASE_IS_PTR is true. OFFSET is a byte
 1798 displacement. ORIG_TYPE is the desired result type.
 1799 LOC is the location of the original expression. */
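 /* Editor's sketch of the intended folding (assumes 4-byte int and no
 padding):
 struct S { int x; int y; } s;
 *(int *)((char *)&s + 4) => s.y
 where RECORD_TYPE is struct S, OFFSET is 4 and ORIG_TYPE is int. */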
4ee9c684 1800
3b45913d 1801static tree
e60a6f7b 1802maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
1803 tree base, tree offset,
4ee9c684 1804 tree orig_type, bool base_is_ptr)
1805{
6d5d8428 1806 tree f, t, field_type, tail_array_field, field_offset;
d9745cea 1807 tree ret;
1808 tree new_base;
4ee9c684 1809
1810 if (TREE_CODE (record_type) != RECORD_TYPE
1811 && TREE_CODE (record_type) != UNION_TYPE
1812 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1813 return NULL_TREE;
1814
1815 /* Short-circuit silly cases. */
c8ca3ee7 1816 if (useless_type_conversion_p (record_type, orig_type))
4ee9c684 1817 return NULL_TREE;
1818
1819 tail_array_field = NULL_TREE;
1820 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1821 {
1822 int cmp;
1823
1824 if (TREE_CODE (f) != FIELD_DECL)
1825 continue;
1826 if (DECL_BIT_FIELD (f))
1827 continue;
6d5d8428 1828
3b45913d 1829 if (!DECL_FIELD_OFFSET (f))
1830 continue;
6d5d8428 1831 field_offset = byte_position (f);
1832 if (TREE_CODE (field_offset) != INTEGER_CST)
4ee9c684 1833 continue;
1834
1835 /* ??? Java creates "interesting" fields for representing base classes.
1836 They have no name, and have no context. With no context, we get into
1837 trouble with nonoverlapping_component_refs_p. Skip them. */
1838 if (!DECL_FIELD_CONTEXT (f))
1839 continue;
1840
1841 /* The previous array field isn't at the end. */
1842 tail_array_field = NULL_TREE;
1843
1844 /* Check to see if this offset overlaps with the field. */
6d5d8428 1845 cmp = tree_int_cst_compare (field_offset, offset);
4ee9c684 1846 if (cmp > 0)
1847 continue;
1848
1849 field_type = TREE_TYPE (f);
4ee9c684 1850
1851 /* Here we exactly match the offset being checked. If the types match,
1852 then we can return that field. */
115073ff 1853 if (cmp == 0
c8ca3ee7 1854 && useless_type_conversion_p (orig_type, field_type))
4ee9c684 1855 {
1856 if (base_is_ptr)
1857 base = build1 (INDIRECT_REF, record_type, base);
40b19772 1858 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
4ee9c684 1859 return t;
1860 }
115073ff 1861
1862 /* Don't care about offsets into the middle of scalars. */
1863 if (!AGGREGATE_TYPE_P (field_type))
1864 continue;
4ee9c684 1865
115073ff 1866 /* Check for an array at the end of the struct. This is often
 1867 used for flexible array members. We should be able to
1868 turn this into an array access anyway. */
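 /* For instance (editor's note, hypothetical declaration):
 struct msg { int len; char data[1]; };
 an offset past DATA's start is remembered here so that it can be
 retried below as an index into the trailing array. */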
1869 if (TREE_CODE (field_type) == ARRAY_TYPE)
1870 tail_array_field = f;
1871
1872 /* Check the end of the field against the offset. */
1873 if (!DECL_SIZE_UNIT (f)
1874 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1875 continue;
1876 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1877 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1878 continue;
4ee9c684 1879
115073ff 1880 /* If we matched, then set offset to the displacement into
1881 this field. */
d9745cea 1882 if (base_is_ptr)
1883 new_base = build1 (INDIRECT_REF, record_type, base);
1884 else
1885 new_base = base;
e60a6f7b 1886 protected_set_expr_location (new_base, loc);
d9745cea 1887 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
e60a6f7b 1888 protected_set_expr_location (new_base, loc);
d9745cea 1889
1890 /* Recurse to possibly find the match. */
e60a6f7b 1891 ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
b7488229 1892 f == TYPE_FIELDS (record_type));
d9745cea 1893 if (ret)
1894 return ret;
e60a6f7b 1895 ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
d9745cea 1896 orig_type, false);
1897 if (ret)
1898 return ret;
4ee9c684 1899 }
1900
1901 if (!tail_array_field)
1902 return NULL_TREE;
1903
1904 f = tail_array_field;
1905 field_type = TREE_TYPE (f);
115073ff 1906 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
4ee9c684 1907
4ee9c684 1908 /* If we get here, we've got an aggregate field, and a possibly
365db11e 1909 nonzero offset into it. Recurse and hope for a valid match. */
4ee9c684 1910 if (base_is_ptr)
e60a6f7b 1911 {
1912 base = build1 (INDIRECT_REF, record_type, base);
1913 SET_EXPR_LOCATION (base, loc);
1914 }
40b19772 1915 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
e60a6f7b 1916 SET_EXPR_LOCATION (base, loc);
4ee9c684 1917
e60a6f7b 1918 t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
b7488229 1919 f == TYPE_FIELDS (record_type));
4ee9c684 1920 if (t)
1921 return t;
e60a6f7b 1922 return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
4ee9c684 1923 orig_type, false);
1924}
1925
3b45913d 1926/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
e60a6f7b 1927 or BASE[index], or by a combination of those.
 1928
 1929 LOC is the location of the original expression.
3b45913d 1930
 1931 Before attempting the conversion, strip off existing ADDR_EXPRs and
 1932 handled component refs. */
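 /* Combined example (editor's addition, assumes 4-byte int and no
 padding):
 struct S { int a[2]; int b; } s;
 *(int *)((char *)&s + 8) => s.b
 *(int *)((char *)&s + 4) => s.a[1]
 the first via maybe_fold_offset_to_component_ref alone, the second
 through its recursion into maybe_fold_offset_to_array_ref. */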
1933
1934tree
e60a6f7b 1935maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
1936 tree orig_type)
3b45913d 1937{
1938 tree ret;
1939 tree type;
1940 bool base_is_ptr = true;
1941
1942 STRIP_NOPS (base);
1943 if (TREE_CODE (base) == ADDR_EXPR)
1944 {
1945 base_is_ptr = false;
1946
1947 base = TREE_OPERAND (base, 0);
1948
 1949 /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
 1950 field of a union, so it needs to be removed and a new COMPONENT_REF
 1951 constructed. Wrong COMPONENT_REFs are often produced by folding the
 1952 (type *)&object within the expression (type *)&object+offset */
5768aeb3 1953 if (handled_component_p (base))
3b45913d 1954 {
1955 HOST_WIDE_INT sub_offset, size, maxsize;
1956 tree newbase;
1957 newbase = get_ref_base_and_extent (base, &sub_offset,
1958 &size, &maxsize);
1959 gcc_assert (newbase);
5768aeb3 1960 if (size == maxsize
844461a0 1961 && size != -1
5768aeb3 1962 && !(sub_offset & (BITS_PER_UNIT - 1)))
3b45913d 1963 {
1964 base = newbase;
1965 if (sub_offset)
1966 offset = int_const_binop (PLUS_EXPR, offset,
1967 build_int_cst (TREE_TYPE (offset),
1968 sub_offset / BITS_PER_UNIT), 1);
1969 }
1970 }
c8ca3ee7 1971 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
3b45913d 1972 && integer_zerop (offset))
1973 return base;
1974 type = TREE_TYPE (base);
1975 }
1976 else
1977 {
1978 base_is_ptr = true;
1979 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1980 return NULL_TREE;
1981 type = TREE_TYPE (TREE_TYPE (base));
1982 }
e60a6f7b 1983 ret = maybe_fold_offset_to_component_ref (loc, type, base, offset,
3b45913d 1984 orig_type, base_is_ptr);
1985 if (!ret)
1986 {
1987 if (base_is_ptr)
e60a6f7b 1988 {
1989 base = build1 (INDIRECT_REF, type, base);
1990 SET_EXPR_LOCATION (base, loc);
1991 }
1992 ret = maybe_fold_offset_to_array_ref (loc,
1993 base, offset, orig_type, true);
3b45913d 1994 }
1995 return ret;
1996}
41511585 1997
5768aeb3 1998/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
 1999 or &BASE[index], or by a combination of those.
 2000
e60a6f7b 2001 LOC is the location of the original expression.
 2002
5768aeb3 2003 Before attempting the conversion, strip off existing component refs. */
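 /* Editor's illustration (assumes 4-byte int):
 int a[10];
 (int *)((char *)&a + 4) => &a[1]
 subject to the __builtin_object_size safeguard documented in the
 function body below. */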
2004
2005tree
e60a6f7b 2006maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
2007 tree orig_type)
5768aeb3 2008{
2009 tree t;
2010
2011 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
2012 && POINTER_TYPE_P (orig_type));
2013
e60a6f7b 2014 t = maybe_fold_offset_to_reference (loc, addr, offset,
2015 TREE_TYPE (orig_type));
5768aeb3 2016 if (t != NULL_TREE)
2017 {
2018 tree orig = addr;
2019 tree ptr_type;
2020
2021 /* For __builtin_object_size to function correctly we need to
 2022 make sure not to fold address arithmetic in a way that changes
 2023 the reference from one array to another. This would happen,
 2024 for example, for
2025
 2026 struct X { char s1[10]; char s2[10]; } s;
2027 char *foo (void) { return &s.s2[-4]; }
2028
2029 where we need to avoid generating &s.s1[6]. As the C and
2030 C++ frontends create different initial trees
2031 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
2032 sophisticated comparisons here. Note that checking for the
2033 condition after the fact is easier than trying to avoid doing
2034 the folding. */
2035 STRIP_NOPS (orig);
2036 if (TREE_CODE (orig) == ADDR_EXPR)
2037 orig = TREE_OPERAND (orig, 0);
2038 if ((TREE_CODE (orig) == ARRAY_REF
2039 || (TREE_CODE (orig) == COMPONENT_REF
2040 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
2041 && (TREE_CODE (t) == ARRAY_REF
bc79a76b 2042 || TREE_CODE (t) == COMPONENT_REF)
5768aeb3 2043 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
2044 ? TREE_OPERAND (orig, 0) : orig,
2045 TREE_CODE (t) == ARRAY_REF
2046 ? TREE_OPERAND (t, 0) : t, 0))
2047 return NULL_TREE;
2048
2049 ptr_type = build_pointer_type (TREE_TYPE (t));
2050 if (!useless_type_conversion_p (orig_type, ptr_type))
2051 return NULL_TREE;
e60a6f7b 2052 t = build_fold_addr_expr_with_type (t, ptr_type);
2053 protected_set_expr_location (t, loc);
2054 return t;
5768aeb3 2055 }
2056
2057 return NULL_TREE;
2058}
2059
304557cd 2060/* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
4ee9c684 2061 Return the simplified expression, or NULL if nothing could be done. */
2062
2063static tree
2064maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2065{
2066 tree t;
5acf8305 2067 bool volatile_p = TREE_THIS_VOLATILE (expr);
e60a6f7b 2068 location_t loc = EXPR_LOCATION (expr);
4ee9c684 2069
2070 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2071 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2072 are sometimes added. */
2073 base = fold (base);
40bcfc86 2074 STRIP_TYPE_NOPS (base);
4ee9c684 2075 TREE_OPERAND (expr, 0) = base;
2076
2077 /* One possibility is that the address reduces to a string constant. */
2078 t = fold_read_from_constant_string (expr);
2079 if (t)
2080 return t;
2081
0de36bdb 2082 /* Add in any offset from a POINTER_PLUS_EXPR. */
2083 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
4ee9c684 2084 {
2085 tree offset2;
2086
2087 offset2 = TREE_OPERAND (base, 1);
2088 if (TREE_CODE (offset2) != INTEGER_CST)
2089 return NULL_TREE;
2090 base = TREE_OPERAND (base, 0);
2091
0de36bdb 2092 offset = fold_convert (sizetype,
2093 int_const_binop (PLUS_EXPR, offset, offset2, 1));
4ee9c684 2094 }
2095
2096 if (TREE_CODE (base) == ADDR_EXPR)
2097 {
3b45913d 2098 tree base_addr = base;
2099
4ee9c684 2100 /* Strip the ADDR_EXPR. */
2101 base = TREE_OPERAND (base, 0);
2102
e67e5e1f 2103 /* Fold away a CONST_DECL to its value, if that value is a gimple invariant. */
2104 if (TREE_CODE (base) == CONST_DECL
0a685b29 2105 && is_gimple_min_invariant (DECL_INITIAL (base)))
e67e5e1f 2106 return DECL_INITIAL (base);
2107
4ee9c684 2108 /* Try folding *(&B+O) to B.X. */
e60a6f7b 2109 t = maybe_fold_offset_to_reference (loc, base_addr, offset,
3b45913d 2110 TREE_TYPE (expr));
4ee9c684 2111 if (t)
5acf8305 2112 {
bcc7452f 2113 /* Preserve volatileness of the original expression.
2114 We can end up with a plain decl here which is shared
2115 and we shouldn't mess with its flags. */
2116 if (!SSA_VAR_P (t))
2117 TREE_THIS_VOLATILE (t) = volatile_p;
5acf8305 2118 return t;
2119 }
4ee9c684 2120 }
2121 else
2122 {
2123 /* We can get here for out-of-range string constant accesses,
2124 such as "_"[3]. Bail out of the entire substitution search
2125 and arrange for the entire statement to be replaced by a
06b27565 2126 call to __builtin_trap. In all likelihood this will all be
4ee9c684 2127 constant-folded away, but in the meantime we can't leave with
2128 something that get_expr_operands can't understand. */
2129
2130 t = base;
2131 STRIP_NOPS (t);
2132 if (TREE_CODE (t) == ADDR_EXPR
2133 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2134 {
2135 /* FIXME: Except that this causes problems elsewhere with dead
1fa3a8f6 2136 code not being deleted, and we die in the rtl expanders
4ee9c684 2137 because we failed to remove some ssa_name. In the meantime,
2138 just return zero. */
2139 /* FIXME2: This condition should be signaled by
2140 fold_read_from_constant_string directly, rather than
2141 re-checking for it here. */
2142 return integer_zero_node;
2143 }
2144
2145 /* Try folding *(B+O) to B->X. Still an improvement. */
2146 if (POINTER_TYPE_P (TREE_TYPE (base)))
2147 {
e60a6f7b 2148 t = maybe_fold_offset_to_reference (loc, base, offset,
3b45913d 2149 TREE_TYPE (expr));
4ee9c684 2150 if (t)
2151 return t;
2152 }
2153 }
2154
2155 /* Otherwise we had an offset that we could not simplify. */
2156 return NULL_TREE;
2157}
2158
41511585 2159
75a70cf9 2160/* A quaint feature extant in our address arithmetic is that there
4ee9c684 2161 can be hidden type changes here. The type of the result need
2162 not be the same as the type of the input pointer.
2163
2164 What we're after here is an expression of the form
2165 (T *)(&array + const)
75a70cf9 2166 where array is OP0, const is OP1, RES_TYPE is T and
2167 the cast doesn't actually exist, but is implicit in the
0de36bdb 2168 type of the POINTER_PLUS_EXPR. We'd like to turn this into
4ee9c684 2169 &array[x]
2170 which may be able to propagate further. */
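 /* For example (editor's sketch, assuming 4-byte int):
 int a[10];
 (int *)(&a p+ 8) => &a[2]
 where p+ stands for POINTER_PLUS_EXPR, OP0 is &a, OP1 is 8 and
 RES_TYPE is int *. */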
2171
75a70cf9 2172tree
e60a6f7b 2173maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
4ee9c684 2174{
4ee9c684 2175 tree ptd_type;
2176 tree t;
4ee9c684 2177
4ee9c684 2178 /* The first operand should be an ADDR_EXPR. */
2179 if (TREE_CODE (op0) != ADDR_EXPR)
2180 return NULL_TREE;
2181 op0 = TREE_OPERAND (op0, 0);
2182
87c5de3b 2183 /* It had better be a constant. */
2184 if (TREE_CODE (op1) != INTEGER_CST)
2185 {
2186 /* Or op0 should now be A[0] and the non-constant offset defined
2187 via a multiplication by the array element size. */
2188 if (TREE_CODE (op0) == ARRAY_REF
2189 && integer_zerop (TREE_OPERAND (op0, 1))
2190 && TREE_CODE (op1) == SSA_NAME
2191 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2192 {
2193 gimple offset_def = SSA_NAME_DEF_STMT (op1);
2194 if (!is_gimple_assign (offset_def))
2195 return NULL_TREE;
2196
2197 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2198 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2199 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2200 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2201 return build1 (ADDR_EXPR, res_type,
2202 build4 (ARRAY_REF, TREE_TYPE (op0),
2203 TREE_OPERAND (op0, 0),
2204 gimple_assign_rhs1 (offset_def),
2205 TREE_OPERAND (op0, 2),
2206 TREE_OPERAND (op0, 3)));
2207 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2208 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2209 return build1 (ADDR_EXPR, res_type,
2210 build4 (ARRAY_REF, TREE_TYPE (op0),
2211 TREE_OPERAND (op0, 0),
2212 op1,
2213 TREE_OPERAND (op0, 2),
2214 TREE_OPERAND (op0, 3)));
2215 }
2216 return NULL_TREE;
2217 }
2218
4ee9c684 2219 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2220 the offset into it. */
2221 while (TREE_CODE (op0) == ARRAY_REF)
2222 {
2223 tree array_obj = TREE_OPERAND (op0, 0);
2224 tree array_idx = TREE_OPERAND (op0, 1);
2225 tree elt_type = TREE_TYPE (op0);
2226 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2227 tree min_idx;
2228
2229 if (TREE_CODE (array_idx) != INTEGER_CST)
2230 break;
2231 if (TREE_CODE (elt_size) != INTEGER_CST)
2232 break;
2233
2234 /* Un-bias the index by the min index of the array type. */
2235 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2236 if (min_idx)
2237 {
2238 min_idx = TYPE_MIN_VALUE (min_idx);
2239 if (min_idx)
2240 {
6374121b 2241 if (TREE_CODE (min_idx) != INTEGER_CST)
2242 break;
2243
535664e3 2244 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
4ee9c684 2245 if (!integer_zerop (min_idx))
2246 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2247 min_idx, 0);
2248 }
2249 }
2250
2251 /* Convert the index to a byte offset. */
535664e3 2252 array_idx = fold_convert (sizetype, array_idx);
4ee9c684 2253 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2254
2255 /* Update the operands for the next round, or for folding. */
0de36bdb 2256 op1 = int_const_binop (PLUS_EXPR,
4ee9c684 2257 array_idx, op1, 0);
4ee9c684 2258 op0 = array_obj;
2259 }
2260
75a70cf9 2261 ptd_type = TREE_TYPE (res_type);
0b4a6afc 2262 /* If we want a pointer to void, reconstruct the reference from the
2263 array element type. A pointer to that can be trivially converted
2264 to void *. This happens as we fold (void *)(ptr p+ off). */
2265 if (VOID_TYPE_P (ptd_type)
2266 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2267 ptd_type = TREE_TYPE (TREE_TYPE (op0));
4ee9c684 2268
2269 /* At which point we can try some of the same things as for indirects. */
e60a6f7b 2270 t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
4ee9c684 2271 if (!t)
e60a6f7b 2272 t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
4ee9c684 2273 ptd_type, false);
2274 if (t)
e60a6f7b 2275 {
2276 t = build1 (ADDR_EXPR, res_type, t);
2277 SET_EXPR_LOCATION (t, loc);
2278 }
4ee9c684 2279
2280 return t;
2281}
2282
304557cd 2283/* Subroutine of fold_stmt. We perform several simplifications of the
 2284 memory reference tree EXPR and make sure the result is re-gimplified properly
2285 after propagation of constant addresses. IS_LHS is true if the
2286 reference is supposed to be an lvalue. */
4ee9c684 2287
2288static tree
304557cd 2289maybe_fold_reference (tree expr, bool is_lhs)
4ee9c684 2290{
304557cd 2291 tree *t = &expr;
4ee9c684 2292
304557cd 2293 if (TREE_CODE (expr) == ARRAY_REF
2294 && !is_lhs)
2295 {
2296 tree tem = fold_read_from_constant_string (expr);
2297 if (tem)
2298 return tem;
2299 }
75a70cf9 2300
304557cd 2301 /* ??? We might want to open-code the relevant remaining cases
2302 to avoid using the generic fold. */
2303 if (handled_component_p (*t)
2304 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
4ee9c684 2305 {
304557cd 2306 tree tem = fold (*t);
2307 if (tem != *t)
2308 return tem;
2309 }
2310
2311 while (handled_component_p (*t))
2312 t = &TREE_OPERAND (*t, 0);
4ee9c684 2313
304557cd 2314 if (TREE_CODE (*t) == INDIRECT_REF)
2315 {
2316 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2317 integer_zero_node);
4d444068 2318 /* Avoid folding *"abc" = 5 into 'a' = 5. */
304557cd 2319 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2320 tem = NULL_TREE;
2321 if (!tem
2322 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
8ac2d49b 2323 /* If we had a good reason for propagating the address here,
2324 make sure we end up with valid gimple. See PR34989. */
304557cd 2325 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4ee9c684 2326
304557cd 2327 if (tem)
2328 {
2329 *t = tem;
2330 tem = maybe_fold_reference (expr, is_lhs);
2331 if (tem)
2332 return tem;
2333 return expr;
2334 }
4ee9c684 2335 }
2336
2337 return NULL_TREE;
2338}
2339
304557cd 2340
0a39fd54 2341/* Compute in *LENGTH the string length, the maximum string length,
 2342 or the maximum value of ARG.
 2343 If ARG is an SSA name, follow its use-def chains. Return false if
 2344 *LENGTH is already set and, for TYPE == 0, differs from the length
 2345 we determine, or if we are unable to determine the length or value
 2346 at all. VISITED is a bitmap of visited SSA names.
 2347 TYPE is 0 if the string length should be computed, 1 for the maximum
 2348 string length and 2 for the maximum value ARG can have. */
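 /* Editor's illustration (schematic GIMPLE, not from the sources):
 # s_3 = PHI <"foo"(bb 2), "bar"(bb 3)>
 n_4 = strlen (s_3);
 With TYPE == 0 both PHI arguments have length 3, so *LENGTH becomes
 3; had the two constant lengths differed, the walk would return
 false. */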
4ee9c684 2349
72648a0e 2350static bool
0a39fd54 2351get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
4ee9c684 2352{
75a70cf9 2353 tree var, val;
2354 gimple def_stmt;
41511585 2355
2356 if (TREE_CODE (arg) != SSA_NAME)
72648a0e 2357 {
ec0fa513 2358 if (TREE_CODE (arg) == COND_EXPR)
2359 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2360 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
0b4a6afc 2361 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2362 else if (TREE_CODE (arg) == ADDR_EXPR
2363 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2364 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2365 {
2366 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2367 if (TREE_CODE (aop0) == INDIRECT_REF
2368 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2369 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2370 length, visited, type);
2371 }
ec0fa513 2372
0a39fd54 2373 if (type == 2)
2374 {
2375 val = arg;
2376 if (TREE_CODE (val) != INTEGER_CST
2377 || tree_int_cst_sgn (val) < 0)
2378 return false;
2379 }
2380 else
2381 val = c_strlen (arg, 1);
41511585 2382 if (!val)
72648a0e 2383 return false;
e37235f0 2384
0a39fd54 2385 if (*length)
2386 {
2387 if (type > 0)
2388 {
2389 if (TREE_CODE (*length) != INTEGER_CST
2390 || TREE_CODE (val) != INTEGER_CST)
2391 return false;
2392
2393 if (tree_int_cst_lt (*length, val))
2394 *length = val;
2395 return true;
2396 }
2397 else if (simple_cst_equal (val, *length) != 1)
2398 return false;
2399 }
4ee9c684 2400
41511585 2401 *length = val;
2402 return true;
4ee9c684 2403 }
72648a0e 2404
41511585 2405 /* If we were already here, break the infinite cycle. */
2406 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2407 return true;
2408 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2409
2410 var = arg;
2411 def_stmt = SSA_NAME_DEF_STMT (var);
4ee9c684 2412
75a70cf9 2413 switch (gimple_code (def_stmt))
2414 {
2415 case GIMPLE_ASSIGN:
2416 /* The RHS of the statement defining VAR must either have a
2417 constant length or come from another SSA_NAME with a constant
2418 length. */
2419 if (gimple_assign_single_p (def_stmt)
2420 || gimple_assign_unary_nop_p (def_stmt))
2421 {
2422 tree rhs = gimple_assign_rhs1 (def_stmt);
2423 return get_maxval_strlen (rhs, length, visited, type);
2424 }
2425 return false;
2426
2427 case GIMPLE_PHI:
41511585 2428 {
2429 /* All the arguments of the PHI node must have the same constant
2430 length. */
75a70cf9 2431 unsigned i;
2432
2433 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2434 {
2435 tree arg = gimple_phi_arg (def_stmt, i)->def;
2436
2437 /* If this PHI has itself as an argument, we cannot
2438 determine the string length of this argument. However,
2439 if we can find a constant string length for the other
2440 PHI args then we can still be sure that this is a
2441 constant string length. So be optimistic and just
2442 continue with the next argument. */
2443 if (arg == gimple_phi_result (def_stmt))
2444 continue;
2445
2446 if (!get_maxval_strlen (arg, length, visited, type))
2447 return false;
2448 }
2449 }
2450 return true;
4ee9c684 2451
41511585 2452 default:
75a70cf9 2453 return false;
4ee9c684 2454 }
4ee9c684 2455}
2456
2457
75a70cf9 2458/* Fold builtin call in statement STMT. Returns a simplified tree.
2459 We may return a non-constant expression, including another call
2460 to a different function and with different arguments, e.g.,
2461 substituting memcpy for strcpy when the string length is known.
2462 Note that some builtins expand into inline code that may not
2463 be valid in GIMPLE. Callers must take care. */
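 /* Typical outcome (editor's note, details illustrative): when the
 length of SRC is known to be 3,
 strcpy (dst, src) => memcpy (dst, src, 4)
 i.e. the returned tree may be a call to a different builtin. */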
4ee9c684 2464
2465static tree
75a70cf9 2466ccp_fold_builtin (gimple stmt)
4ee9c684 2467{
0a39fd54 2468 tree result, val[3];
c2f47e15 2469 tree callee, a;
be9f921e 2470 int arg_idx, type;
f0613857 2471 bitmap visited;
2472 bool ignore;
c2f47e15 2473 int nargs;
4ee9c684 2474
75a70cf9 2475 gcc_assert (is_gimple_call (stmt));
2476
2477 ignore = (gimple_call_lhs (stmt) == NULL);
4ee9c684 2478
2479 /* First try the generic builtin folder. If that succeeds, return the
2480 result directly. */
75a70cf9 2481 result = fold_call_stmt (stmt, ignore);
4ee9c684 2482 if (result)
0a39fd54 2483 {
2484 if (ignore)
2485 STRIP_NOPS (result);
2486 return result;
2487 }
f0613857 2488
2489 /* Ignore MD builtins. */
75a70cf9 2490 callee = gimple_call_fndecl (stmt);
f0613857 2491 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2492 return NULL_TREE;
4ee9c684 2493
2494 /* If the builtin could not be folded, and it has no argument list,
2495 we're done. */
75a70cf9 2496 nargs = gimple_call_num_args (stmt);
c2f47e15 2497 if (nargs == 0)
4ee9c684 2498 return NULL_TREE;
2499
 2500 /* Limit the work to builtins we know how to simplify. */
2501 switch (DECL_FUNCTION_CODE (callee))
2502 {
2503 case BUILT_IN_STRLEN:
2504 case BUILT_IN_FPUTS:
2505 case BUILT_IN_FPUTS_UNLOCKED:
be9f921e 2506 arg_idx = 0;
0a39fd54 2507 type = 0;
4ee9c684 2508 break;
2509 case BUILT_IN_STRCPY:
2510 case BUILT_IN_STRNCPY:
be9f921e 2511 arg_idx = 1;
0a39fd54 2512 type = 0;
2513 break;
2514 case BUILT_IN_MEMCPY_CHK:
2515 case BUILT_IN_MEMPCPY_CHK:
2516 case BUILT_IN_MEMMOVE_CHK:
2517 case BUILT_IN_MEMSET_CHK:
2518 case BUILT_IN_STRNCPY_CHK:
be9f921e 2519 arg_idx = 2;
0a39fd54 2520 type = 2;
2521 break;
2522 case BUILT_IN_STRCPY_CHK:
2523 case BUILT_IN_STPCPY_CHK:
be9f921e 2524 arg_idx = 1;
0a39fd54 2525 type = 1;
2526 break;
2527 case BUILT_IN_SNPRINTF_CHK:
2528 case BUILT_IN_VSNPRINTF_CHK:
be9f921e 2529 arg_idx = 1;
0a39fd54 2530 type = 2;
4ee9c684 2531 break;
2532 default:
2533 return NULL_TREE;
2534 }
2535
54807c88 2536 if (arg_idx >= nargs)
2537 return NULL_TREE;
2538
4ee9c684 2539 /* Try to use the dataflow information gathered by the CCP process. */
27335ffd 2540 visited = BITMAP_ALLOC (NULL);
be9f921e 2541 bitmap_clear (visited);
4ee9c684 2542
0a39fd54 2543 memset (val, 0, sizeof (val));
be9f921e 2544 a = gimple_call_arg (stmt, arg_idx);
2545 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2546 val[arg_idx] = NULL_TREE;
4ee9c684 2547
27335ffd 2548 BITMAP_FREE (visited);
4ee9c684 2549
f0613857 2550 result = NULL_TREE;
4ee9c684 2551 switch (DECL_FUNCTION_CODE (callee))
2552 {
2553 case BUILT_IN_STRLEN:
54807c88 2554 if (val[0] && nargs == 1)
4ee9c684 2555 {
75a70cf9 2556 tree new_val =
2557 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
4ee9c684 2558
2559 /* If the result is not a valid gimple value, or not a cast
 2560 of a valid gimple value, then we cannot use the result. */
f0d6e81c 2561 if (is_gimple_val (new_val)
2562 || (is_gimple_cast (new_val)
2563 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2564 return new_val;
4ee9c684 2565 }
f0613857 2566 break;
2567
4ee9c684 2568 case BUILT_IN_STRCPY:
c2f47e15 2569 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2570 result = fold_builtin_strcpy (callee,
75a70cf9 2571 gimple_call_arg (stmt, 0),
2572 gimple_call_arg (stmt, 1),
c2f47e15 2573 val[1]);
f0613857 2574 break;
2575
4ee9c684 2576 case BUILT_IN_STRNCPY:
c2f47e15 2577 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2578 result = fold_builtin_strncpy (callee,
75a70cf9 2579 gimple_call_arg (stmt, 0),
2580 gimple_call_arg (stmt, 1),
2581 gimple_call_arg (stmt, 2),
c2f47e15 2582 val[1]);
f0613857 2583 break;
2584
4ee9c684 2585 case BUILT_IN_FPUTS:
54807c88 2586 if (nargs == 2)
2587 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2588 gimple_call_arg (stmt, 1),
2589 ignore, false, val[0]);
f0613857 2590 break;
2591
4ee9c684 2592 case BUILT_IN_FPUTS_UNLOCKED:
54807c88 2593 if (nargs == 2)
2594 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2595 gimple_call_arg (stmt, 1),
2596 ignore, true, val[0]);
0a39fd54 2597 break;
2598
2599 case BUILT_IN_MEMCPY_CHK:
2600 case BUILT_IN_MEMPCPY_CHK:
2601 case BUILT_IN_MEMMOVE_CHK:
2602 case BUILT_IN_MEMSET_CHK:
54807c88 2603 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
c2f47e15 2604 result = fold_builtin_memory_chk (callee,
75a70cf9 2605 gimple_call_arg (stmt, 0),
2606 gimple_call_arg (stmt, 1),
2607 gimple_call_arg (stmt, 2),
2608 gimple_call_arg (stmt, 3),
c2f47e15 2609 val[2], ignore,
0a39fd54 2610 DECL_FUNCTION_CODE (callee));
2611 break;
2612
2613 case BUILT_IN_STRCPY_CHK:
2614 case BUILT_IN_STPCPY_CHK:
54807c88 2615 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
c2f47e15 2616 result = fold_builtin_stxcpy_chk (callee,
75a70cf9 2617 gimple_call_arg (stmt, 0),
2618 gimple_call_arg (stmt, 1),
2619 gimple_call_arg (stmt, 2),
c2f47e15 2620 val[1], ignore,
0a39fd54 2621 DECL_FUNCTION_CODE (callee));
2622 break;
2623
2624 case BUILT_IN_STRNCPY_CHK:
54807c88 2625 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
75a70cf9 2626 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2627 gimple_call_arg (stmt, 1),
2628 gimple_call_arg (stmt, 2),
2629 gimple_call_arg (stmt, 3),
c2f47e15 2630 val[2]);
0a39fd54 2631 break;
2632
2633 case BUILT_IN_SNPRINTF_CHK:
2634 case BUILT_IN_VSNPRINTF_CHK:
2635 if (val[1] && is_gimple_val (val[1]))
75a70cf9 2636 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2637 DECL_FUNCTION_CODE (callee));
f0613857 2638 break;
4ee9c684 2639
2640 default:
8c0963c4 2641 gcc_unreachable ();
4ee9c684 2642 }
2643
f0613857 2644 if (result && ignore)
db97ad41 2645 result = fold_ignored_result (result);
f0613857 2646 return result;
4ee9c684 2647}
2648
75a70cf9 2649/* Attempt to fold an assignment statement pointed-to by SI. Returns a
2650 replacement rhs for the statement or NULL_TREE if no simplification
2651 could be made. It is assumed that the operands have been previously
2652 folded. */
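 /* E.g. (editor's illustration of typical unfolded inputs):
 x_2 = *&y; => returns rhs y
 z_3 = 1 < 2 ? a_1 : b_1; => returns rhs a_1
 The caller rewrites the statement with the returned rhs. */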
2653
2654static tree
2655fold_gimple_assign (gimple_stmt_iterator *si)
2656{
2657 gimple stmt = gsi_stmt (*si);
2658 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2659
304557cd 2660 tree result = NULL_TREE;
75a70cf9 2661
2662 switch (get_gimple_rhs_class (subcode))
2663 {
2664 case GIMPLE_SINGLE_RHS:
2665 {
2666 tree rhs = gimple_assign_rhs1 (stmt);
304557cd 2667
75a70cf9 2668 /* Try to fold a conditional expression. */
2669 if (TREE_CODE (rhs) == COND_EXPR)
2670 {
304557cd 2671 tree op0 = COND_EXPR_COND (rhs);
2672 tree tem;
2673 bool set = false;
2674
2675 if (COMPARISON_CLASS_P (op0))
2676 {
2677 fold_defer_overflow_warnings ();
2678 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2679 TREE_OPERAND (op0, 0),
2680 TREE_OPERAND (op0, 1));
2681 /* This is actually a conditional expression, not a GIMPLE
 2682 conditional statement; however, the valid_gimple_rhs_p
2683 test still applies. */
2684 set = (tem && is_gimple_condexpr (tem)
2685 && valid_gimple_rhs_p (tem));
2686 fold_undefer_overflow_warnings (set, stmt, 0);
2687 }
2688 else if (is_gimple_min_invariant (op0))
2689 {
2690 tem = op0;
2691 set = true;
2692 }
2693 else
2694 return NULL_TREE;
2695
2696 if (set)
2697 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), tem,
2698 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
75a70cf9 2699 }
2700
304557cd 2701 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2702 return maybe_fold_tmr (rhs);
2703
2704 else if (REFERENCE_CLASS_P (rhs))
2705 return maybe_fold_reference (rhs, false);
2706
2707 else if (TREE_CODE (rhs) == ADDR_EXPR)
2708 {
2709 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2710 if (tem)
2711 result = fold_convert (TREE_TYPE (rhs),
2712 build_fold_addr_expr (tem));
2713 }
2714
388a0bc7 2715 else if (TREE_CODE (rhs) == CONSTRUCTOR
2716 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2717 && (CONSTRUCTOR_NELTS (rhs)
2718 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2719 {
2720 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
2721 unsigned i;
2722 tree val;
2723
2724 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2725 if (TREE_CODE (val) != INTEGER_CST
2726 && TREE_CODE (val) != REAL_CST
2727 && TREE_CODE (val) != FIXED_CST)
2728 return NULL_TREE;
2729
2730 return build_vector_from_ctor (TREE_TYPE (rhs),
2731 CONSTRUCTOR_ELTS (rhs));
2732 }
2733
75a70cf9 2734 /* If we couldn't fold the RHS, hand over to the generic
2735 fold routines. */
2736 if (result == NULL_TREE)
2737 result = fold (rhs);
2738
2739 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2740 that may have been added by fold, and "useless" type
2741 conversions that might now be apparent due to propagation. */
2742 STRIP_USELESS_TYPE_CONVERSION (result);
2743
2744 if (result != rhs && valid_gimple_rhs_p (result))
2745 return result;
304557cd 2746
2747 return NULL_TREE;
75a70cf9 2748 }
2749 break;
2750
2751 case GIMPLE_UNARY_RHS:
f1fb2997 2752 {
2753 tree rhs = gimple_assign_rhs1 (stmt);
75a70cf9 2754
f1fb2997 2755 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2756 if (result)
2757 {
2758 /* If the operation was a conversion do _not_ mark a
2759 resulting constant with TREE_OVERFLOW if the original
2760 constant was not. These conversions have implementation
2761 defined behavior and retaining the TREE_OVERFLOW flag
2762 here would confuse later passes such as VRP. */
2763 if (CONVERT_EXPR_CODE_P (subcode)
2764 && TREE_CODE (result) == INTEGER_CST
2765 && TREE_CODE (rhs) == INTEGER_CST)
2766 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2767
2768 STRIP_USELESS_TYPE_CONVERSION (result);
2769 if (valid_gimple_rhs_p (result))
2770 return result;
2771 }
2772 else if (CONVERT_EXPR_CODE_P (subcode)
2773 && POINTER_TYPE_P (gimple_expr_type (stmt))
2774 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2775 {
2776 tree type = gimple_expr_type (stmt);
e60a6f7b 2777 tree t = maybe_fold_offset_to_address (gimple_location (stmt),
2778 gimple_assign_rhs1 (stmt),
f1fb2997 2779 integer_zero_node, type);
2780 if (t)
2781 return t;
2782 }
2783 }
75a70cf9 2784 break;
2785
2786 case GIMPLE_BINARY_RHS:
2787 /* Try to fold pointer addition. */
2788 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
93f3673b 2789 {
2790 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2791 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2792 {
2793 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2794 if (!useless_type_conversion_p
2795 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2796 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2797 }
e60a6f7b 2798 result = maybe_fold_stmt_addition (gimple_location (stmt),
2799 type,
93f3673b 2800 gimple_assign_rhs1 (stmt),
2801 gimple_assign_rhs2 (stmt));
2802 }
75a70cf9 2803
2804 if (!result)
2805 result = fold_binary (subcode,
2806 TREE_TYPE (gimple_assign_lhs (stmt)),
2807 gimple_assign_rhs1 (stmt),
2808 gimple_assign_rhs2 (stmt));
2809
2810 if (result)
2811 {
2812 STRIP_USELESS_TYPE_CONVERSION (result);
2813 if (valid_gimple_rhs_p (result))
2814 return result;
ec3753d0 2815
2816 /* Fold might have produced non-GIMPLE, so if we trust it blindly
 2817 we lose canonicalization opportunities. Do not go
 2818 through fold again here though, or the same non-GIMPLE will be
2819 produced. */
2820 if (commutative_tree_code (subcode)
2821 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2822 gimple_assign_rhs2 (stmt), false))
2823 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2824 gimple_assign_rhs2 (stmt),
2825 gimple_assign_rhs1 (stmt));
75a70cf9 2826 }
2827 break;
2828
2829 case GIMPLE_INVALID_RHS:
2830 gcc_unreachable ();
2831 }
2832
2833 return NULL_TREE;
2834}
2835
2836/* Attempt to fold a conditional statement. Return true if any changes were
2837 made. We only attempt to fold the condition expression, and do not perform
2838 any transformation that would require alteration of the cfg. It is
2839 assumed that the operands have been previously folded. */
2840
2841static bool
2842fold_gimple_cond (gimple stmt)
2843{
2844 tree result = fold_binary (gimple_cond_code (stmt),
2845 boolean_type_node,
2846 gimple_cond_lhs (stmt),
2847 gimple_cond_rhs (stmt));
2848
2849 if (result)
2850 {
2851 STRIP_USELESS_TYPE_CONVERSION (result);
2852 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2853 {
2854 gimple_cond_set_condition_from_tree (stmt, result);
2855 return true;
2856 }
2857 }
2858
2859 return false;
2860}
2861
2862
2863/* Attempt to fold a call statement referenced by the statement iterator GSI.
2864 The statement may be replaced by another statement, e.g., if the call
2865 simplifies to a constant value. Return true if any changes were made.
2866 It is assumed that the operands have been previously folded. */
2867
2868static bool
2869fold_gimple_call (gimple_stmt_iterator *gsi)
2870{
2871 gimple stmt = gsi_stmt (*gsi);
2872
2873 tree callee = gimple_call_fndecl (stmt);
2874
2875 /* Check for builtins that CCP can handle using information not
2876 available in the generic fold routines. */
2877 if (callee && DECL_BUILT_IN (callee))
2878 {
2879 tree result = ccp_fold_builtin (stmt);
2880
2881 if (result)
2882 return update_call_from_tree (gsi, result);
2883 }
2884 else
2885 {
2886 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2887 here are when we've propagated the address of a decl into the
2888 object slot. */
2889 /* ??? Should perhaps do this in fold proper. However, doing it
2890 there requires that we create a new CALL_EXPR, and that requires
2891 copying EH region info to the new node. Easier to just do it
2892 here where we can just smash the call operand. */
2893 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2894 callee = gimple_call_fn (stmt);
2895 if (TREE_CODE (callee) == OBJ_TYPE_REF
2896 && lang_hooks.fold_obj_type_ref
2897 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2898 && DECL_P (TREE_OPERAND
2899 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2900 {
2901 tree t;
2902
2903 /* ??? Caution: Broken ADDR_EXPR semantics means that
2904 looking at the type of the operand of the addr_expr
2905 can yield an array type. See silly exception in
2906 check_pointer_types_r. */
2907 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2908 t = lang_hooks.fold_obj_type_ref (callee, t);
2909 if (t)
2910 {
2911 gimple_call_set_fn (stmt, t);
2912 return true;
2913 }
2914 }
2915 }
2916
2917 return false;
2918}
4ee9c684 2919
1c03e359 2920/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2921 distinguishes both cases. */
4ee9c684 2922
1c03e359 2923static bool
2924fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
4ee9c684 2925{
41511585 2926 bool changed = false;
75a70cf9 2927 gimple stmt = gsi_stmt (*gsi);
1c03e359 2928 unsigned i;
add6ee5e 2929
75a70cf9 2930 /* Fold the main computation performed by the statement. */
2931 switch (gimple_code (stmt))
4ee9c684 2932 {
75a70cf9 2933 case GIMPLE_ASSIGN:
2934 {
1c03e359 2935 unsigned old_num_ops = gimple_num_ops (stmt);
75a70cf9 2936 tree new_rhs = fold_gimple_assign (gsi);
1c03e359 2937 if (new_rhs != NULL_TREE
2938 && (!inplace
2939 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
75a70cf9 2940 {
2941 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2942 changed = true;
2943 }
75a70cf9 2944 break;
2945 }
1c03e359 2946
75a70cf9 2947 case GIMPLE_COND:
2948 changed |= fold_gimple_cond (stmt);
2949 break;
1c03e359 2950
75a70cf9 2951 case GIMPLE_CALL:
1c03e359 2952 /* Fold *& in call arguments. */
2953 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2954 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2955 {
2956 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2957 if (tmp)
2958 {
2959 gimple_call_set_arg (stmt, i, tmp);
2960 changed = true;
2961 }
2962 }
75a70cf9 2963 /* The entire statement may be replaced in this case. */
1c03e359 2964 if (!inplace)
2965 changed |= fold_gimple_call (gsi);
75a70cf9 2966 break;
e77b8618 2967
1c03e359 2968 case GIMPLE_ASM:
2969 /* Fold *& in asm operands. */
2970 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2971 {
2972 tree link = gimple_asm_output_op (stmt, i);
2973 tree op = TREE_VALUE (link);
2974 if (REFERENCE_CLASS_P (op)
2975 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2976 {
2977 TREE_VALUE (link) = op;
2978 changed = true;
2979 }
2980 }
2981 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2982 {
2983 tree link = gimple_asm_input_op (stmt, i);
2984 tree op = TREE_VALUE (link);
2985 if (REFERENCE_CLASS_P (op)
2986 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
2987 {
2988 TREE_VALUE (link) = op;
2989 changed = true;
2990 }
2991 }
75a70cf9 2992 break;
1c03e359 2993
2994 default:;
ec0fa513 2995 }
4ee9c684 2996
304557cd 2997 stmt = gsi_stmt (*gsi);
2998
2999 /* Fold *& on the lhs. */
3000 if (gimple_has_lhs (stmt))
3001 {
3002 tree lhs = gimple_get_lhs (stmt);
3003 if (lhs && REFERENCE_CLASS_P (lhs))
3004 {
3005 tree new_lhs = maybe_fold_reference (lhs, true);
3006 if (new_lhs)
3007 {
3008 gimple_set_lhs (stmt, new_lhs);
3009 changed = true;
3010 }
3011 }
3012 }
3013
41511585 3014 return changed;
4ee9c684 3015}
3016
1c03e359 3017/* Fold the statement pointed to by GSI. In some cases, this function may
3018 replace the whole statement with a new one. Returns true iff folding
3019 makes any changes.
3020 The statement pointed to by GSI should be in valid gimple form but may
 3021 be in an unfolded state resulting from, for example, constant propagation
3022 which can produce *&x = 0. */
3023
3024bool
3025fold_stmt (gimple_stmt_iterator *gsi)
3026{
3027 return fold_stmt_1 (gsi, false);
3028}
3029
8171a1dd 3030/* Perform the minimal folding on statement STMT. Only operations like
3031 *&x created by constant propagation are handled. The statement cannot
75a70cf9 3032 be replaced with a new one. Return true if the statement was
304557cd 3033 changed, false otherwise.
3034 The statement STMT should be in valid gimple form but may
 3035 be in an unfolded state resulting from, for example, constant propagation
3036 which can produce *&x = 0. */
8171a1dd 3037
3038bool
75a70cf9 3039fold_stmt_inplace (gimple stmt)
8171a1dd 3040{
1c03e359 3041 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3042 bool changed = fold_stmt_1 (&gsi, true);
3043 gcc_assert (gsi_stmt (gsi) == stmt);
8171a1dd 3044 return changed;
3045}
75a70cf9 3046
bdd0e199 3047/* Try to optimize out a __builtin_stack_restore: remove it if there
 3048 is another __builtin_stack_restore in the same basic
 3049 block and no calls or ASM_EXPRs are in between, or if this block's
 3050 only outgoing edge is to EXIT_BLOCK and there are no calls or
 3051 ASM_EXPRs after this __builtin_stack_restore. */
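 /* Editor's sketch of the pattern handled here (hypothetical GIMPLE):
 saved_1 = __builtin_stack_save ();
 ... no calls or asm statements ...
 __builtin_stack_restore (saved_1);
 The save is rewritten into saved_1 = 0 and the restore is deleted,
 since the stack pointer cannot have moved in between. */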
3052
3053static tree
75a70cf9 3054optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 3055{
75a70cf9 3056 tree callee, rhs;
3057 gimple stmt, stack_save;
3058 gimple_stmt_iterator stack_save_gsi;
3059
3060 basic_block bb = gsi_bb (i);
3061 gimple call = gsi_stmt (i);
bdd0e199 3062
75a70cf9 3063 if (gimple_code (call) != GIMPLE_CALL
3064 || gimple_call_num_args (call) != 1
3065 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3066 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 3067 return NULL_TREE;
3068
75a70cf9 3069 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 3070 {
75a70cf9 3071 stmt = gsi_stmt (i);
3072 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 3073 return NULL_TREE;
75a70cf9 3074 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 3075 continue;
3076
75a70cf9 3077 callee = gimple_call_fndecl (stmt);
bdd0e199 3078 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3079 return NULL_TREE;
3080
3081 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3082 break;
3083 }
3084
75a70cf9 3085 if (gsi_end_p (i)
bdd0e199 3086 && (! single_succ_p (bb)
3087 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3088 return NULL_TREE;
3089
75a70cf9 3090 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3091 if (gimple_code (stack_save) != GIMPLE_CALL
3092 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3093 || stmt_could_throw_p (stack_save)
3094 || !has_single_use (gimple_call_arg (call, 0)))
bdd0e199 3095 return NULL_TREE;
3096
75a70cf9 3097 callee = gimple_call_fndecl (stack_save);
bdd0e199 3098 if (!callee
3099 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3100 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
75a70cf9 3101 || gimple_call_num_args (stack_save) != 0)
bdd0e199 3102 return NULL_TREE;
3103
75a70cf9 3104 stack_save_gsi = gsi_for_stmt (stack_save);
75a70cf9 3105 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3106 if (!update_call_from_tree (&stack_save_gsi, rhs))
4c5fd53c 3107 return NULL_TREE;
bdd0e199 3108
75a70cf9 3109 /* No effect, so the statement will be deleted. */
bdd0e199 3110 return integer_zero_node;
3111}
75a70cf9 3112
8a58ed0a 3113/* If the va_list type is a simple pointer and nothing special is needed,
 3114 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 3115 fold __builtin_va_end (&ap) away as a no-op, and turn __builtin_va_copy
 3116 into a simple pointer assignment. */
3117
3118static tree
75a70cf9 3119optimize_stdarg_builtin (gimple call)
8a58ed0a 3120{
5f57a8b1 3121 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 3122 bool va_list_simple_ptr;
3123
75a70cf9 3124 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 3125 return NULL_TREE;
3126
75a70cf9 3127 callee = gimple_call_fndecl (call);
5f57a8b1 3128
3129 cfun_va_list = targetm.fn_abi_va_list (callee);
3130 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3131 && (TREE_TYPE (cfun_va_list) == void_type_node
3132 || TREE_TYPE (cfun_va_list) == char_type_node);
3133
8a58ed0a 3134 switch (DECL_FUNCTION_CODE (callee))
3135 {
3136 case BUILT_IN_VA_START:
3137 if (!va_list_simple_ptr
3138 || targetm.expand_builtin_va_start != NULL
75a70cf9 3139 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
8a58ed0a 3140 return NULL_TREE;
3141
75a70cf9 3142 if (gimple_call_num_args (call) != 2)
8a58ed0a 3143 return NULL_TREE;
3144
75a70cf9 3145 lhs = gimple_call_arg (call, 0);
8a58ed0a 3146 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3147 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 3148 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3149 return NULL_TREE;
75a70cf9 3150
8a58ed0a 3151 lhs = build_fold_indirect_ref (lhs);
3152 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
75a70cf9 3153 1, integer_zero_node);
8a58ed0a 3154 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3155 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3156
3157 case BUILT_IN_VA_COPY:
3158 if (!va_list_simple_ptr)
3159 return NULL_TREE;
3160
75a70cf9 3161 if (gimple_call_num_args (call) != 2)
8a58ed0a 3162 return NULL_TREE;
3163
75a70cf9 3164 lhs = gimple_call_arg (call, 0);
8a58ed0a 3165 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3166 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 3167 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3168 return NULL_TREE;
3169
3170 lhs = build_fold_indirect_ref (lhs);
75a70cf9 3171 rhs = gimple_call_arg (call, 1);
8a58ed0a 3172 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 3173 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 3174 return NULL_TREE;
3175
3176 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3177 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3178
3179 case BUILT_IN_VA_END:
75a70cf9 3180 /* No effect, so the statement will be deleted. */
8a58ed0a 3181 return integer_zero_node;
3182
3183 default:
3184 gcc_unreachable ();
3185 }
3186}
75a70cf9 3187
909e5ecb 3188/* Convert EXPR into a GIMPLE value suitable for substitution on the
3189 RHS of an assignment. Insert the necessary statements before
75a70cf9 3190 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
 3191 is replaced. If the call is expected to produce a result, then it
3192 is replaced by an assignment of the new RHS to the result variable.
3193 If the result is to be ignored, then the call is replaced by a
3194 GIMPLE_NOP. */
909e5ecb 3195
75a70cf9 3196static void
3197gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
909e5ecb 3198{
75a70cf9 3199 tree lhs;
3200 tree tmp = NULL_TREE; /* Silence warning. */
3201 gimple stmt, new_stmt;
3202 gimple_stmt_iterator i;
 3203 gimple_seq stmts = gimple_seq_alloc ();
dac18d1a 3204 struct gimplify_ctx gctx;
909e5ecb 3205
75a70cf9 3206 stmt = gsi_stmt (*si_p);
3207
3208 gcc_assert (is_gimple_call (stmt));
3209
3210 lhs = gimple_call_lhs (stmt);
3211
dac18d1a 3212 push_gimplify_context (&gctx);
75a70cf9 3213
3214 if (lhs == NULL_TREE)
3215 gimplify_and_add (expr, &stmts);
3216 else
a280136a 3217 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
75a70cf9 3218
909e5ecb 3219 pop_gimplify_context (NULL);
3220
75a70cf9 3221 if (gimple_has_location (stmt))
3222 annotate_all_with_location (stmts, gimple_location (stmt));
b66731e8 3223
909e5ecb 3224 /* The replacement can expose previously unreferenced variables. */
75a70cf9 3225 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3226 {
3227 new_stmt = gsi_stmt (i);
3228 find_new_referenced_vars (new_stmt);
3229 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3230 mark_symbols_for_renaming (new_stmt);
3231 gsi_next (si_p);
3232 }
3233
3234 if (lhs == NULL_TREE)
dd277d48 3235 {
3236 new_stmt = gimple_build_nop ();
3237 unlink_stmt_vdef (stmt);
3238 release_defs (stmt);
3239 }
75a70cf9 3240 else
909e5ecb 3241 {
75a70cf9 3242 new_stmt = gimple_build_assign (lhs, tmp);
dd277d48 3243 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3244 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
75a70cf9 3245 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
909e5ecb 3246 }
3247
75a70cf9 3248 gimple_set_location (new_stmt, gimple_location (stmt));
3249 gsi_replace (si_p, new_stmt, false);
909e5ecb 3250}
3251
4ee9c684 3252/* A simple pass that attempts to fold all builtin functions. This pass
3253 is run after we've propagated as many constants as we can. */
3254
2a1990e9 3255static unsigned int
4ee9c684 3256execute_fold_all_builtins (void)
3257{
b36237eb 3258 bool cfg_changed = false;
4ee9c684 3259 basic_block bb;
b1b7c0c4 3260 unsigned int todoflags = 0;
3261
4ee9c684 3262 FOR_EACH_BB (bb)
3263 {
75a70cf9 3264 gimple_stmt_iterator i;
3265 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 3266 {
75a70cf9 3267 gimple stmt, old_stmt;
4ee9c684 3268 tree callee, result;
0a39fd54 3269 enum built_in_function fcode;
4ee9c684 3270
75a70cf9 3271 stmt = gsi_stmt (i);
3272
3273 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3274 {
75a70cf9 3275 gsi_next (&i);
0a39fd54 3276 continue;
3277 }
75a70cf9 3278 callee = gimple_call_fndecl (stmt);
4ee9c684 3279 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 3280 {
75a70cf9 3281 gsi_next (&i);
0a39fd54 3282 continue;
3283 }
3284 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 3285
75a70cf9 3286 result = ccp_fold_builtin (stmt);
5a4b7e1e 3287
3288 if (result)
75a70cf9 3289 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 3290
4ee9c684 3291 if (!result)
3292 switch (DECL_FUNCTION_CODE (callee))
3293 {
3294 case BUILT_IN_CONSTANT_P:
3295 /* Resolve __builtin_constant_p. If it hasn't been
3296 folded to integer_one_node by now, it's fairly
3297 certain that the value simply isn't constant. */
75a70cf9 3298 result = integer_zero_node;
4ee9c684 3299 break;
3300
bdd0e199 3301 case BUILT_IN_STACK_RESTORE:
75a70cf9 3302 result = optimize_stack_restore (i);
8a58ed0a 3303 if (result)
3304 break;
75a70cf9 3305 gsi_next (&i);
8a58ed0a 3306 continue;
3307
3308 case BUILT_IN_VA_START:
3309 case BUILT_IN_VA_END:
3310 case BUILT_IN_VA_COPY:
3311 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 3312 result = optimize_stdarg_builtin (stmt);
bdd0e199 3313 if (result)
3314 break;
3315 /* FALLTHRU */
3316
4ee9c684 3317 default:
75a70cf9 3318 gsi_next (&i);
4ee9c684 3319 continue;
3320 }
3321
3322 if (dump_file && (dump_flags & TDF_DETAILS))
3323 {
3324 fprintf (dump_file, "Simplified\n ");
75a70cf9 3325 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3326 }
3327
75a70cf9 3328 old_stmt = stmt;
75a70cf9 3329 if (!update_call_from_tree (&i, result))
0fefde02 3330 {
3331 gimplify_and_update_call_from_tree (&i, result);
3332 todoflags |= TODO_update_address_taken;
3333 }
de6ed584 3334
75a70cf9 3335 stmt = gsi_stmt (i);
4c5fd53c 3336 update_stmt (stmt);
de6ed584 3337
75a70cf9 3338 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3339 && gimple_purge_dead_eh_edges (bb))
b36237eb 3340 cfg_changed = true;
4ee9c684 3341
3342 if (dump_file && (dump_flags & TDF_DETAILS))
3343 {
3344 fprintf (dump_file, "to\n ");
75a70cf9 3345 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 3346 fprintf (dump_file, "\n");
3347 }
0a39fd54 3348
3349 /* Retry the same statement if it changed into another
 3350 builtin; there might be new opportunities now. */
75a70cf9 3351 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 3352 {
75a70cf9 3353 gsi_next (&i);
0a39fd54 3354 continue;
3355 }
75a70cf9 3356 callee = gimple_call_fndecl (stmt);
0a39fd54 3357 if (!callee
75a70cf9 3358 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 3359 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 3360 gsi_next (&i);
4ee9c684 3361 }
3362 }
b1b7c0c4 3363
b36237eb 3364 /* Delete unreachable blocks. */
b1b7c0c4 3365 if (cfg_changed)
3366 todoflags |= TODO_cleanup_cfg;
3367
3368 return todoflags;
4ee9c684 3369}
3370
41511585 3371
20099e35 3372struct gimple_opt_pass pass_fold_builtins =
4ee9c684 3373{
20099e35 3374 {
3375 GIMPLE_PASS,
4ee9c684 3376 "fab", /* name */
3377 NULL, /* gate */
3378 execute_fold_all_builtins, /* execute */
3379 NULL, /* sub */
3380 NULL, /* next */
3381 0, /* static_pass_number */
0b1615c1 3382 TV_NONE, /* tv_id */
49290934 3383 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 3384 0, /* properties_provided */
3385 0, /* properties_destroyed */
3386 0, /* todo_flags_start */
909e5ecb 3387 TODO_dump_func
3388 | TODO_verify_ssa
20099e35 3389 | TODO_update_ssa /* todo_flags_finish */
3390 }
4ee9c684 3391};