/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED	->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
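
   As a small worked example (hypothetical GIMPLE, for illustration
   only), consider:

	x_1 = 10;
	y_2 = x_1 + 5;
	if (y_2 > 20)
	  ...

   Simulation sets x_1 to CONSTANT 10, which makes y_2 CONSTANT 15;
   the predicate then folds to false, so only the edge for the false
   arm is marked executable and final substitution can remove the
   branch entirely.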

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* The regular is_gimple_min_invariant does a shallow test of the object.
   It assumes that full gimplification has happened, or will happen on the
   object.  For a value coming from DECL_INITIAL, this is not true, so we
   have to be more strict ourselves.  */

static bool
ccp_decl_initial_min_invariant (tree t)
{
  if (!is_gimple_min_invariant (t))
    return false;
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      /* Inline and unroll is_gimple_addressable.  */
      while (1)
	{
	  t = TREE_OPERAND (t, 0);
	  if (is_gimple_id (t))
	    return true;
	  if (!handled_component_p (t))
	    return false;
	}
    }
  return true;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val
	  && ccp_decl_initial_min_invariant (val))
	return val;
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
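
/* For example (hypothetical): given

	static const int answer = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 for 'answer',
   and for a locally-binding 'static const int' with no initializer it
   returns zero of the symbol's type.  */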
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if (SSA_NAME_VALUE (var)
	   && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.value = SSA_NAME_VALUE (var);
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (IS_EMPTY_STMT (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	       || TREE_CODE (stmt) == PHI_NODE)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))
		  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
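
/* For example (hypothetical GIMPLE): an RHS that is already invariant,
   as in 'x_3 = 10', is likely CONSTANT; 'x_3 = y_4 + 1' with y_4
   UNDEFINED is likely UNDEFINED; but 'x_3 = y_4 / z_5' with y_4
   UNDEFINED falls back to VARYING, since a division may promote the
   undefined operand.  */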
static ccp_lattice_t
likely_value (tree stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (ann->has_volatile_ops)
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return VARYING;

  if (is_gimple_min_invariant (get_rhs (stmt)))
    return CONSTANT;

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand
      && all_undefined_operands)
    return UNDEFINED;
  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	   && has_undefined_operand)
    {
      switch (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (tree stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (stmt_ann (stmt)->has_volatile_ops)
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
	return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
	  && !stmt_makes_single_store (stmt))
	return true;
    }

  /* If it contains a call, it is varying.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }

	  DONT_SIMULATE_AGAIN (stmt) = is_varying;
	}
    }

  /* Now process PHI nodes.  We never set DONT_SIMULATE_AGAIN on PHI nodes,
     since we do not know which edges are executable yet, except for
     phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi)))
	    DONT_SIMULATE_AGAIN (phi) = true;
	  else
	    DONT_SIMULATE_AGAIN (phi) = false;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)   */
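
/* Illustrative use (hypothetical GIMPLE): for a_3 = PHI <a_1, a_2>
   with a_1 CONSTANT 7 on an executable edge and a_2 still UNDEFINED,
   the meet leaves a_3 at CONSTANT 7; if a_2 later becomes CONSTANT 9,
   the next meet lowers a_3 to VARYING.  */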
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  int i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = PHI_ARG_DEF (phi, i);
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */
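
/* For example (hypothetical): if x_2 has lattice value CONSTANT 3, an
   RHS of 'x_2 * 7' simplifies to 21 through fold_binary; an RHS that
   is just the SSA name x_2 simply returns its known constant value.  */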
static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
      return get_value (rhs)->value;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* If the RHS is a memory load, see if the VUSEs associated with
	 it are a valid constant for that memory load.  */
      prop_value_t *val = get_value_loaded_by (stmt, const_val);
      if (val && val->mem_ref)
	{
	  if (operand_equal_p (val->mem_ref, rhs, 0))
	    return val->value;

	  /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
	     complex type with a known constant value, return it.  */
	  if ((TREE_CODE (rhs) == REALPART_EXPR
	       || TREE_CODE (rhs) == IMAGPART_EXPR)
	      && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
	    return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
	}
      return NULL_TREE;
    }

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0)->value;
	}

      if ((code == NOP_EXPR || code == CONVERT_EXPR)
	  && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0)))
	return op0;
      return fold_unary (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
	   || kind == tcc_comparison
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->value;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->value;
	}

      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL
	   && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
    {
      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
	{
	  tree *orig, var;
	  size_t i = 0;
	  ssa_op_iter iter;
	  use_operand_p var_p;

	  /* Preserve the original values of every operand.  */
	  orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    orig[i++] = var;

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL, const_val);
	  retval = fold_call_expr (rhs, false);

	  /* Restore operands to their original form.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
	    SET_USE (var_p, orig[i++]);
	  free (orig);
	}
    }

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
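
/* For example (hypothetical): given

	static const int tbl[3] = { 10, 20, 30 };

   a read of tbl[i_4] where i_4 has lattice value CONSTANT 2 folds to
   the constant 30.  */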
static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return cval;
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return cval;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.  */

static prop_value_t
evaluate_stmt (tree stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
     aggregates, extract the referenced constant.  Otherwise the
     statement is likely to have an UNDEFINED value, and there will be
     nothing to do.  Note that fold_const_aggregate_ref returns
     NULL_TREE if the first case does not match.  */
  else if (!simplified)
    simplified = fold_const_aggregate_ref (get_rhs (stmt));

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  prop_value_t val;
  tree lhs, rhs;
  enum ssa_prop_result retval;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      prop_value_t *nval = get_value (rhs);
      val = *nval;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* Same as above, but the RHS is not a gimple register and yet
	 has a known VUSE.  If STMT is loading from the same memory
	 location that created the SSA_NAMEs for the virtual operands,
	 we can propagate the value on the RHS.  */
      prop_value_t *nval = get_value_loaded_by (stmt, const_val);

      if (nval
	  && nval->mem_ref
	  && operand_equal_p (nval->mem_ref, rhs, 0))
	val = *nval;
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
	&& val.lattice_val == CONSTANT)
      {
	tree w = fold_unary (VIEW_CONVERT_EXPR,
			     TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
			     val.value);

	orig_lhs = TREE_OPERAND (orig_lhs, 0);
	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL;
	  }
      }

    if (val.lattice_val == CONSTANT
	&& TREE_CODE (orig_lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
	tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
				 orig_lhs);

	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	    val.mem_ref = NULL_TREE;
	  }
      }
  }

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_generic_stmt (dump_file, stmt, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
    {
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
  0					/* letter */
};
static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}

static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}

struct tree_opt_pass pass_store_ccp =
{
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
  0					/* letter */
};
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */
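
/* For example (hypothetical): for a 4-bit unsigned bitfield holding
   the value 3 and a 32-bit destination, the result is VAL & 0xf,
   i.e. 3; for a 4-bit signed bitfield holding -1 (bit pattern 0xf),
   the upper 28 bits are set via BIT_IOR_EXPR, yielding -1.  */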
tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }

  return wide_val;
}
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
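
/* For example (hypothetical): with 'int a[10]' and 4-byte ints,
   *(&a + 8) folds to a[2], because the byte offset 8 divides exactly
   by the element size; an offset such as 6 is left alone.  */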
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
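
/* For example (hypothetical): with 'struct S { int x; int y; } s' and
   4-byte ints, folding *(&s + 4) with an int result type yields s.y,
   the field whose byte offset matches.  */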
static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
	 field of a union, so it needs to be removed and a new COMPONENT_REF
	 constructed.  The wrong COMPONENT_REFs are often constructed by
	 folding the (type *)&object within the expression
	 (type *)&object+offset  */
      if (handled_component_p (base) && 0)
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1)));
	  if (size == maxsize)
	    base = newbase;
	  if (sub_offset)
	    offset = int_const_binop (PLUS_EXPR, offset,
				      build_int_cst (TREE_TYPE (offset),
						     sub_offset / BITS_PER_UNIT), 1);
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  EXPR is a POINTER_PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
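
/* For example (hypothetical): for 'short array[8]', the expression
   (short *)(&array + 6) is turned into &array[3], since the byte
   offset 6 corresponds to index 3 with 2-byte elements.  */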
static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;

  gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
/* For passing state through walk_tree into fold_stmt_r and its
   callers.  */

struct fold_stmt_r_data
{
  tree stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
  bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  bool *changed_p = fold_stmt_r_data->changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (t = maybe_fold_offset_to_reference
		    (TREE_OPERAND (expr, 0),
		     integer_zero_node,
		     TREE_TYPE (TREE_TYPE (expr)))))
	{
	  tree ptr_type = build_pointer_type (TREE_TYPE (t));
	  if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
	    return NULL_TREE;
	  t = build_fold_addr_expr_with_type (t, ptr_type);
	}
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }

      t = NULL_TREE;
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  set = tem && set_rhs (expr_p, tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    {
	      t = *expr_p;
	      break;
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.  */
      TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
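/* For example, once CCP has propagated &x into a pointer, the
   INDIRECT_REF case above folds the dereference away (a sketch with
   made-up SSA names):

     p_1 = &x;
     y_2 = *p_1;    <-- after propagation:  y_2 = *&x;  folds to  y_2 = x;  */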
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited,
				     type);

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case GIMPLE_MODIFY_STMT:
      {
	tree rhs;

	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
	STRIP_NOPS (rhs);
	return get_maxval_strlen (rhs, length, visited, type);
      }

    case PHI_NODE:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	int i;

	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  {
	    tree arg = PHI_ARG_DEF (def_stmt, i);

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == PHI_RESULT (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }

	return true;
      }

    default:
      break;
    }

  return false;
}
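/* A sketch of the walk above for TYPE == 0, with made-up GIMPLE:

     s_1 = "foo";
     s_2 = "bar";
     # s_3 = PHI <s_1, s_2>
     n_4 = strlen (s_3);

   Both PHI arguments yield the constant length 3, so *LENGTH is set
   to 3 and the strlen call can be replaced by that constant.  */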
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;
  call_expr_arg_iterator iter;
  int nargs;

  ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_expr (fn, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we can't do anything.  */
  nargs = call_expr_nargs (fn);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  init_call_expr_arg_iterator (fn, &iter);
  for (i = 0; arg_mask; i++, arg_mask >>= 1)
    {
      a = next_call_expr_arg (&iter);
      if (arg_mask & 1)
	{
	  bitmap_clear (visited);
	  if (!get_maxval_strlen (a, &val[i], visited, type))
	    val[i] = NULL_TREE;
	}
    }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
	{
	  tree new_val = fold_convert (TREE_TYPE (fn), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      CALL_EXPR_ARG (fn, 0),
				      CALL_EXPR_ARG (fn, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       CALL_EXPR_ARG (fn, 0),
				       CALL_EXPR_ARG (fn, 1),
				       CALL_EXPR_ARG (fn, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
				   CALL_EXPR_ARG (fn, 1),
				   TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
				   val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
				   CALL_EXPR_ARG (fn, 1),
				   TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
				   val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_memory_chk (callee,
					  CALL_EXPR_ARG (fn, 0),
					  CALL_EXPR_ARG (fn, 1),
					  CALL_EXPR_ARG (fn, 2),
					  CALL_EXPR_ARG (fn, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_stxcpy_chk (callee,
					  CALL_EXPR_ARG (fn, 0),
					  CALL_EXPR_ARG (fn, 1),
					  CALL_EXPR_ARG (fn, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
					   CALL_EXPR_ARG (fn, 1),
					   CALL_EXPR_ARG (fn, 2),
					   CALL_EXPR_ARG (fn, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_snprintf_chk (fn, val[1],
					    DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
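/* E.g. for a call (sketch, made-up SSA names)

     n_1 = strlen (s_2);

   where the use-def walk proves that s_2 always holds a string of
   length 5, the BUILT_IN_STRLEN case above returns the constant 5 for
   the caller to substitute for the call.  */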
/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  struct fold_stmt_r_data fold_stmt_r_data;
  bool changed = false;
  bool inside_addr_expr = false;

  stmt = *stmt_p;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
    {
      *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
	 available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
	result = ccp_fold_builtin (stmt, rhs);
      else
	{
	  /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	     here are when we've propagated the address of a decl into the
	     object slot.  */
	  /* ??? Should perhaps do this in fold proper.  However, doing it
	     there requires that we create a new CALL_EXPR, and that requires
	     copying EH region info to the new node.  Easier to just do it
	     here where we can just smash the call operand.  Also
	     CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
	     copied, fold_call_expr does not have that information.  */
	  callee = CALL_EXPR_FN (rhs);
	  if (TREE_CODE (callee) == OBJ_TYPE_REF
	      && lang_hooks.fold_obj_type_ref
	      && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND
			 (OBJ_TYPE_REF_OBJECT (callee), 0)))
	    {
	      tree t;

	      /* ??? Caution: Broken ADDR_EXPR semantics means that
		 looking at the type of the operand of the addr_expr
		 can yield an array type.  See silly exception in
		 check_pointer_types_r.  */

	      t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	      t = lang_hooks.fold_obj_type_ref (callee, t);
	      if (t)
		{
		  CALL_EXPR_FN (rhs) = t;
		  changed = true;
		}
	    }
	}
    }
  else if (TREE_CODE (rhs) == COND_EXPR)
    {
      tree temp = fold (COND_EXPR_COND (rhs));
      if (temp != COND_EXPR_COND (rhs))
	result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
			      COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
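/* The OBJ_TYPE_REF handling above is a simple form of devirtualization.
   Roughly (C++-flavored sketch, made-up names):

     struct B b;
     p_1 = &b;
     OBJ_TYPE_REF (f; p_1) (p_1);   <-- object slot now holds &b

   Since the dynamic type is then known, lang_hooks.fold_obj_type_ref can
   replace the indirect callee with the concrete method in place.  */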
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  */

bool
fold_stmt_inplace (tree stmt)
{
  tree old_stmt = stmt, rhs, new_rhs;
  struct fold_stmt_r_data fold_stmt_r_data;
  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL);
  gcc_assert (stmt == old_stmt);

  rhs = get_rhs (stmt);
  if (!rhs || rhs == stmt)
    return changed;

  new_rhs = fold (rhs);
  STRIP_USELESS_TYPE_CONVERSION (new_rhs);
  if (new_rhs == rhs)
    return changed;

  changed |= set_rhs (&stmt, new_rhs);
  gcc_assert (stmt == old_stmt);

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
{
  tree stack_save, stmt, callee;

  if (TREE_CODE (call) != CALL_EXPR
      || call_expr_nargs (call) != 1
      || TREE_CODE (CALL_EXPR_ARG (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
    return NULL_TREE;

  for (bsi_next (&i); !bsi_end_p (i); bsi_next (&i))
    {
      tree call;

      stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == ASM_EXPR)
	return NULL_TREE;
      call = get_call_expr_in (stmt);
      if (call == NULL)
	continue;

      callee = get_callee_fndecl (call);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (bsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (CALL_EXPR_ARG (call, 0));
  if (TREE_CODE (stack_save) != GIMPLE_MODIFY_STMT
      || GIMPLE_STMT_OPERAND (stack_save, 0) != CALL_EXPR_ARG (call, 0)
      || TREE_CODE (GIMPLE_STMT_OPERAND (stack_save, 1)) != CALL_EXPR
      || tree_could_throw_p (stack_save)
      || !has_single_use (CALL_EXPR_ARG (call, 0)))
    return NULL_TREE;

  callee = get_callee_fndecl (GIMPLE_STMT_OPERAND (stack_save, 1));
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || call_expr_nargs (GIMPLE_STMT_OPERAND (stack_save, 1)) != 0)
    return NULL_TREE;

  stmt = stack_save;
  push_stmt_changes (&stmt);
  if (!set_rhs (&stmt,
		build_int_cst (TREE_TYPE (CALL_EXPR_ARG (call, 0)), 0)))
    {
      discard_stmt_changes (&stmt);
      return NULL_TREE;
    }
  gcc_assert (stmt == stack_save);
  pop_stmt_changes (&stmt);

  return integer_zero_node;
}
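/* For example (sketch, made-up SSA names):

     p_1 = __builtin_stack_save ();
     ...                                <-- no calls or asms in between
     __builtin_stack_restore (p_1);     <-- block falls through to exit

   Here the restore is redundant, so the stack_save RHS is replaced by
   the constant 0 above, and integer_zero_node is returned so the caller
   folds the restore call away as well.  */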
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (tree call)
{
  tree callee, lhs, rhs;
  bool va_list_simple_ptr;

  if (TREE_CODE (call) != CALL_EXPR)
    return NULL_TREE;

  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
		       && (TREE_TYPE (va_list_type_node) == void_type_node
			   || TREE_TYPE (va_list_type_node) == char_type_node);

  callee = get_callee_fndecl (call);
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (call_expr_nargs (call) != 2)
	return NULL_TREE;

      lhs = CALL_EXPR_ARG (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (va_list_type_node))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (call_expr_nargs (call) != 2)
	return NULL_TREE;

      lhs = CALL_EXPR_ARG (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (va_list_type_node))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = CALL_EXPR_ARG (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (va_list_type_node))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before the
   iterator *SI_P.
   When IGNORE is set, don't worry about the return value.  */

static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  if (ignore)
    {
      tmp = build_empty_stmt ();
      gimplify_and_add (expr, &stmts);
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      tree new_stmt = tsi_stmt (ti);
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      bsi_next (si_p);
    }

  return tmp;
}
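/* This re-gimplification matters when a folded builtin is no longer a
   flat GIMPLE value.  E.g. (sketch) if

     fputs ("x", f_1);

   is folded to a call to fputc, the replacement expression is
   gimplified into a short statement list, which is inserted before the
   original call statement.  */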
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree *stmtp = bsi_stmt_ptr (i);
	  tree old_stmt = *stmtp;
	  tree call = get_rhs (*stmtp);
	  tree callee, result;
	  enum built_in_function fcode;

	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (*stmtp, call);
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (bb, *stmtp, i);
		if (result)
		  break;
		bsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (*stmtp);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		bsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	    }

	  push_stmt_changes (stmtp);

	  if (!set_rhs (stmtp, result))
	    {
	      result = convert_to_gimple_builtin (&i, result,
						  TREE_CODE (old_stmt)
						  != GIMPLE_MODIFY_STMT);
	      if (result)
		{
		  bool ok = set_rhs (stmtp, result);
		  gcc_assert (ok);
		  todoflags |= TODO_rebuild_alias;
		}
	    }

	  pop_stmt_changes (stmtp);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
	      && tree_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  call = get_rhs (*stmtp);
	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    bsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa,			/* todo_flags_finish */
  0					/* letter */
};