/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }

  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }

  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }

  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;

  static hashval_t
  hash (const value_range_base *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }

  static bool
  equal (const value_range_base *a, const value_range_base *b)
  {
    return a->equal_p (*b);
  }

  static void
  mark_empty (value_range_base *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const value_range_base *p)
  {
    return p == NULL;
  }

  static bool
  is_deleted (const value_range_base *p)
  {
    return p == reinterpret_cast<const value_range_base *> (1);
  }

  static void
  mark_deleted (value_range_base *&p)
  {
    p = reinterpret_cast<value_range_base *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl = node->decl;
  tree fnargs, parm;
  int param_num = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	  fprintf (f, "\n");
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_jump_functions (f, node);
}
/* Set JFUNC to be an unknown jump function, i.e. one known to carry no
   useful information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
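/* For illustration only (this sketch is not part of the original sources):
   a constructor following the layout assumed above would look roughly like
   the following C++ fragment, with hypothetical class and function names:

     struct B { virtual void f (); };
     struct D : B
     {
       D () : B ()     // 1) ancestor sub-object constructed first
       {
	 // 2) the compiler stores the VMT pointer(s) for D at this point
	 g ();         // 3) only now may user code make virtual calls
       }
       void g ();
     };
*/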
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified; in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   ...
	   D.1867_2 = p.1_1->f;
	   ...
	 }
      */
      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.

   2) The passed value can be described by a simple arithmetic pass-through
   jump function, e.g.:

     D.2064_4 = a.1(D) + 4;

   This case can also occur in combination of the previous one, e.g.:

     ...
     D.2064_4 = a.0_3 + 4;

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       ...
       D.1845_2 = &this_1(D)->D.1748;
       ...
     }

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     ...
     iftmp.1_3 = &obj_2(D)->D.1762;
     ...
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     ...
*/

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
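/* Purely as an illustrative sketch (not part of the original sources): the
   record layout accepted above corresponds to the common representation of a
   pointer to member function, roughly like the following hypothetical
   declaration:

     struct hypothetical_memptr_repr
     {
       void (SomeClass::*__pfn) ();   // pointer to the method (or vtable index)
       ptrdiff_t __delta;             // adjustment applied to the this pointer
     };
*/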
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
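/* Purely as an illustration (not from the original sources): for a caller
   that does something like

     s.f1 = 1;
     s.f2 = 2;
     bar (&s);

   the list built by the functions below would contain one entry per field
   store, sorted ascendingly by offset, with the constants recorded in the
   respective entries.  */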
/* Add a known content item into a linked list of ipa_known_agg_contents_list
   structures, in which all elements are sorted ascendingly by offset.  */

static void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given known content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  tree lhs, rhs, lhs_base;
  bool reverse;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (!is_gimple_reg_type (TREE_TYPE (rhs))
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return false;

  lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					  &lhs_size, &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  rhs = get_ssa_def_if_simple_copy (rhs);

  content->size = lhs_size;
  content->offset = lhs_offset;
  content->constant = is_gimple_ip_invariant (rhs) ? rhs : NULL_TREE;
  content->next = NULL;

  return true;
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  AA_WALK_BUDGET_P points to limit on number of
   statements we allow get_continuation_for_phi to examine.  */

static void
determine_known_aggregate_parts (gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc,
				 unsigned *aa_walk_budget_p)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, const_count = 0;
  int ipa_max_agg_items = PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS);
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (ipa_max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       *aa_walk_budget_p,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if (content->constant
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++const_count == ipa_max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * ipa_max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range_base *
ipa_get_value_range (value_range_base *tmp)
{
  value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range_base *vr = ggc_alloc<value_range_base> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range_base *
ipa_get_value_range (enum value_range_kind type, tree min, tree max)
{
  value_range_base tmp (type, min, max);
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range_base resvr;
	      value_range_base tmpvr (type,
				      wide_int_to_tree (TREE_TYPE (arg), min),
				      wide_int_to_tree (TREE_TYPE (arg), max));
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     function.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (call, arg, param_type, jfunc,
					 &fbi->aa_walk_budget);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or

     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     ...
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;
     ...
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");
       ...
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
2195 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2198 class ipa_node_params
*info
= fbi
->info
;
2199 HOST_WIDE_INT offset
;
2202 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2204 tree var
= SSA_NAME_VAR (target
);
2205 int index
= ipa_get_param_decl_index (info
, var
);
2207 ipa_note_param_call (fbi
->node
, index
, call
);
2212 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2213 bool guaranteed_unmodified
;
2214 if (gimple_assign_single_p (def
)
2215 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2216 gimple_assign_rhs1 (def
), &index
, &offset
,
2217 NULL
, &by_ref
, &guaranteed_unmodified
))
2219 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2220 cs
->indirect_info
->offset
= offset
;
2221 cs
->indirect_info
->agg_contents
= 1;
2222 cs
->indirect_info
->by_ref
= by_ref
;
2223 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2227 /* Now we need to try to match the complex pattern of calling a member
2229 if (gimple_code (def
) != GIMPLE_PHI
2230 || gimple_phi_num_args (def
) != 2
2231 || !POINTER_TYPE_P (TREE_TYPE (target
))
2232 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2235 /* First, we need to check whether one of these is a load from a member
2236 pointer that is a parameter to this function. */
2237 tree n1
= PHI_ARG_DEF (def
, 0);
2238 tree n2
= PHI_ARG_DEF (def
, 1);
2239 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2241 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2242 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2245 basic_block bb
, virt_bb
;
2246 basic_block join
= gimple_bb (def
);
2247 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2249 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2252 bb
= EDGE_PRED (join
, 0)->src
;
2253 virt_bb
= gimple_bb (d2
);
2255 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2257 bb
= EDGE_PRED (join
, 1)->src
;
2258 virt_bb
= gimple_bb (d1
);
2263 /* Second, we need to check that the basic blocks are laid out in the way
2264 corresponding to the pattern. */
2266 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2267 || single_pred (virt_bb
) != bb
2268 || single_succ (virt_bb
) != join
)
2271 /* Third, let's see that the branching is done depending on the least
2272 significant bit of the pfn. */
2274 gimple
*branch
= last_stmt (bb
);
2275 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2278 if ((gimple_cond_code (branch
) != NE_EXPR
2279 && gimple_cond_code (branch
) != EQ_EXPR
)
2280 || !integer_zerop (gimple_cond_rhs (branch
)))
2283 tree cond
= gimple_cond_lhs (branch
);
2284 if (!ipa_is_ssa_with_stmt_def (cond
))
2287 def
= SSA_NAME_DEF_STMT (cond
);
2288 if (!is_gimple_assign (def
)
2289 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2290 || !integer_onep (gimple_assign_rhs2 (def
)))
2293 cond
= gimple_assign_rhs1 (def
);
2294 if (!ipa_is_ssa_with_stmt_def (cond
))
2297 def
= SSA_NAME_DEF_STMT (cond
);
2299 if (is_gimple_assign (def
)
2300 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2302 cond
= gimple_assign_rhs1 (def
);
2303 if (!ipa_is_ssa_with_stmt_def (cond
))
2305 def
= SSA_NAME_DEF_STMT (cond
);
2309 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2310 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2311 == ptrmemfunc_vbit_in_delta
),
2316 index
= ipa_get_param_decl_index (info
, rec
);
2318 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2320 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2321 cs
->indirect_info
->offset
= offset
;
2322 cs
->indirect_info
->agg_contents
= 1;
2323 cs
->indirect_info
->member_ptr
= 1;
2324 cs
->indirect_info
->guaranteed_unmodified
= 1;
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
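/* Note on the counting above: a parameter ends up with a non-negative
   controlled-uses count only when every use of its default-def SSA name is a
   (non-debug) call statement, e.g. a pointer parameter that is only ever
   passed on to other calls.  Any other kind of use, or a non-SSA parameter,
   makes the count IPA_UNDESCRIBED_USE.  */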
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
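/* The entry point below ties the pieces above together: parameter descriptors
   are created first, controlled uses are computed, call graph edges are
   bucketed by the basic block of their call statement, and a dominator walk
   then runs ipa_analyze_params_uses_in_bb and
   ipa_compute_jump_functions_for_bb on every block.  */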
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
2645 /* Update the jump functions associated with call graph edge E when the call
2646 graph edge CS is being inlined, assuming that E->caller is already (possibly
2647 indirectly) inlined into CS->callee and that E has not been inlined. */
2650 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2651 struct cgraph_edge
*e
)
2653 class ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2654 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2657 int count
= ipa_get_cs_argument_count (args
);
2660 for (i
= 0; i
< count
; i
++)
2662 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2663 class ipa_polymorphic_call_context
*dst_ctx
2664 = ipa_get_ith_polymorhic_call_context (args
, i
);
2666 if (dst
->type
== IPA_JF_ANCESTOR
)
2668 struct ipa_jump_func
*src
;
2669 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2670 class ipa_polymorphic_call_context
*src_ctx
2671 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2673 /* Variable number of arguments can cause havoc if we try to access
2674 one that does not exist in the inlined edge. So make sure we
2676 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2678 ipa_set_jf_unknown (dst
);
2682 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2684 if (src_ctx
&& !src_ctx
->useless_p ())
2686 class ipa_polymorphic_call_context ctx
= *src_ctx
;
2688 /* TODO: Make type preserved safe WRT contexts. */
2689 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2690 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2691 ctx
.offset_by (dst
->value
.ancestor
.offset
);
2692 if (!ctx
.useless_p ())
2696 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2698 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2701 dst_ctx
->combine_with (ctx
);
2706 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2708 struct ipa_agg_jf_item
*item
;
2711 /* Currently we do not produce clobber aggregate jump functions,
2712 replace with merging when we do. */
2713 gcc_assert (!dst
->agg
.items
);
2715 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2716 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2717 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2718 item
->offset
-= dst
->value
.ancestor
.offset
;
2721 if (src
->type
== IPA_JF_PASS_THROUGH
2722 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2724 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2725 dst
->value
.ancestor
.agg_preserved
&=
2726 src
->value
.pass_through
.agg_preserved
;
2728 else if (src
->type
== IPA_JF_ANCESTOR
)
2730 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2731 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2732 dst
->value
.ancestor
.agg_preserved
&=
2733 src
->value
.ancestor
.agg_preserved
;
2736 ipa_set_jf_unknown (dst
);
2738 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2740 struct ipa_jump_func
*src
;
2741 /* We must check range due to calls with variable number of arguments
2742 and we cannot combine jump functions with operations. */
2743 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2744 && (top
&& dst
->value
.pass_through
.formal_id
2745 < ipa_get_cs_argument_count (top
)))
2747 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2748 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2749 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2750 class ipa_polymorphic_call_context
*src_ctx
2751 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2753 if (src_ctx
&& !src_ctx
->useless_p ())
2755 class ipa_polymorphic_call_context ctx
= *src_ctx
;
2757 /* TODO: Make type preserved safe WRT contexts. */
2758 if (!ipa_get_jf_pass_through_type_preserved (dst
))
2759 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2760 if (!ctx
.useless_p ())
2764 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2766 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2768 dst_ctx
->combine_with (ctx
);
2773 case IPA_JF_UNKNOWN
:
2774 ipa_set_jf_unknown (dst
);
2777 ipa_set_jf_cst_copy (dst
, src
);
2780 case IPA_JF_PASS_THROUGH
:
2782 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2783 enum tree_code operation
;
2784 operation
= ipa_get_jf_pass_through_operation (src
);
2786 if (operation
== NOP_EXPR
)
2790 && ipa_get_jf_pass_through_agg_preserved (src
);
2791 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
2793 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
2794 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
2797 tree operand
= ipa_get_jf_pass_through_operand (src
);
2798 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2803 case IPA_JF_ANCESTOR
:
2807 && ipa_get_jf_ancestor_agg_preserved (src
);
2808 ipa_set_ancestor_jf (dst
,
2809 ipa_get_jf_ancestor_offset (src
),
2810 ipa_get_jf_ancestor_formal_id (src
),
2819 && (dst_agg_p
|| !src
->agg
.by_ref
))
2821 /* Currently we do not produce clobber aggregate jump
2822 functions, replace with merging when we do. */
2823 gcc_assert (!dst
->agg
.items
);
2825 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2826 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2830 ipa_set_jf_unknown (dst
);
2835 /* If TARGET is an addr_expr of a function declaration, make it the
2836 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2837 Otherwise, return NULL. */
2839 struct cgraph_edge
*
2840 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
2843 struct cgraph_node
*callee
;
2844 bool unreachable
= false;
2846 if (TREE_CODE (target
) == ADDR_EXPR
)
2847 target
= TREE_OPERAND (target
, 0);
2848 if (TREE_CODE (target
) != FUNCTION_DECL
)
2850 target
= canonicalize_constructor_val (target
, NULL
);
2851 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2853 /* Member pointer call that goes through a VMT lookup. */
2854 if (ie
->indirect_info
->member_ptr
2855 /* Or if target is not an invariant expression and we do not
2856 know if it will evaulate to function at runtime.
2857 This can happen when folding through &VAR, where &VAR
2858 is IP invariant, but VAR itself is not.
2860 TODO: Revisit this when GCC 5 is branched. It seems that
2861 member_ptr check is not needed and that we may try to fold
2862 the expression and see if VAR is readonly. */
2863 || !is_gimple_ip_invariant (target
))
2865 if (dump_enabled_p ())
2867 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2868 "discovered direct call non-invariant %s\n",
2869 ie
->caller
->dump_name ());
2875 if (dump_enabled_p ())
2877 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2878 "discovered direct call to non-function in %s, "
2879 "making it __builtin_unreachable\n",
2880 ie
->caller
->dump_name ());
2883 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2884 callee
= cgraph_node::get_create (target
);
2888 callee
= cgraph_node::get (target
);
2891 callee
= cgraph_node::get (target
);
2893 /* Because may-edges are not explicitely represented and vtable may be external,
2894 we may create the first reference to the object in the unit. */
2895 if (!callee
|| callee
->global
.inlined_to
)
2898 /* We are better to ensure we can refer to it.
2899 In the case of static functions we are out of luck, since we already
2900 removed its body. In the case of public functions we may or may
2901 not introduce the reference. */
2902 if (!canonicalize_constructor_val (target
, NULL
)
2903 || !TREE_PUBLIC (target
))
2906 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2907 "(%s -> %s) but cannot refer to it. Giving up.\n",
2908 ie
->caller
->dump_name (),
2909 ie
->callee
->dump_name ());
2912 callee
= cgraph_node::get_create (target
);
2915 /* If the edge is already speculated. */
2916 if (speculative
&& ie
->speculative
)
2918 struct cgraph_edge
*e2
;
2919 struct ipa_ref
*ref
;
2920 ie
->speculative_call_info (e2
, ie
, ref
);
2921 if (e2
->callee
->ultimate_alias_target ()
2922 != callee
->ultimate_alias_target ())
2925 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative "
2926 "target (%s -> %s) but the call is already "
2927 "speculated to %s. Giving up.\n",
2928 ie
->caller
->dump_name (), callee
->dump_name (),
2929 e2
->callee
->dump_name ());
2934 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2935 "(%s -> %s) this agree with previous speculation.\n",
2936 ie
->caller
->dump_name (), callee
->dump_name ());
2941 if (!dbg_cnt (devirt
))
2944 ipa_check_create_node_params ();
2946 /* We cannot make edges to inline clones. It is bug that someone removed
2947 the cgraph node too early. */
2948 gcc_assert (!callee
->global
.inlined_to
);
2950 if (dump_file
&& !unreachable
)
2952 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
2953 "(%s -> %s), for stmt ",
2954 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2955 speculative
? "speculative" : "known",
2956 ie
->caller
->dump_name (),
2957 callee
->dump_name ());
2959 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2961 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2963 if (dump_enabled_p ())
2965 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2966 "converting indirect call in %s to direct call to %s\n",
2967 ie
->caller
->name (), callee
->name ());
2971 struct cgraph_edge
*orig
= ie
;
2972 ie
= ie
->make_direct (callee
);
2973 /* If we resolved speculative edge the cost is already up to date
2974 for direct call (adjusted by inline_edge_duplication_hook). */
2977 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
2978 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2979 - eni_size_weights
.call_cost
);
2980 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2981 - eni_time_weights
.call_cost
);
2986 if (!callee
->can_be_discarded_p ())
2989 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
2993 /* make_speculative will update ie's cost to direct call cost. */
2994 ie
= ie
->make_speculative
2995 (callee
, ie
->count
.apply_scale (8, 10));
3001 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3002 CONSTRUCTOR and return it. Return NULL if the search fails for some
3006 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3008 tree type
= TREE_TYPE (constructor
);
3009 if (TREE_CODE (type
) != ARRAY_TYPE
3010 && TREE_CODE (type
) != RECORD_TYPE
)
3015 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3017 HOST_WIDE_INT elt_offset
;
3018 if (TREE_CODE (type
) == ARRAY_TYPE
)
3021 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3022 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3026 if (TREE_CODE (index
) == RANGE_EXPR
)
3027 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3029 off
= wi::to_offset (index
);
3030 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3032 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3033 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3034 off
= wi::sext (off
- wi::to_offset (low_bound
),
3035 TYPE_PRECISION (TREE_TYPE (index
)));
3037 off
*= wi::to_offset (unit_size
);
3038 /* ??? Handle more than just the first index of a
3042 off
= wi::to_offset (unit_size
) * ix
;
3044 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3045 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3047 elt_offset
= off
.to_shwi ();
3049 else if (TREE_CODE (type
) == RECORD_TYPE
)
3051 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3052 if (DECL_BIT_FIELD (index
))
3054 elt_offset
= int_bit_position (index
);
3059 if (elt_offset
> req_offset
)
3062 if (TREE_CODE (val
) == CONSTRUCTOR
)
3063 return find_constructor_constant_at_offset (val
,
3064 req_offset
- elt_offset
);
3066 if (elt_offset
== req_offset
3067 && is_gimple_reg_type (TREE_TYPE (val
))
3068 && is_gimple_ip_invariant (val
))
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
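/* Note on the lookup above: items of the aggregate jump function are matched
   purely by offset equality and by the by_ref flag; there is no search for
   overlapping or partially covering items.  A hypothetical item list
   { offset 0, value 42; offset 64, value &foo } therefore only answers
   queries for exactly those two offsets.  */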
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_enabled_p ())
    {
      if (target)
	dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			 "Type inconsistent devirtualization: %s->%s\n",
			 ie->caller->dump_name (),
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			 "No devirtualization target in %s\n",
			 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
3290 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3291 call based on a formal parameter which is described by jump function JFUNC
3292 and if it can be determined, make it direct and return the direct edge.
3293 Otherwise, return NULL. CTX describes the polymorphic context that the
3294 parameter the call is based on brings along with it. */
3296 static struct cgraph_edge
*
3297 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3298 struct ipa_jump_func
*jfunc
,
3299 class ipa_polymorphic_call_context ctx
)
3302 bool speculative
= false;
3304 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3307 gcc_assert (!ie
->indirect_info
->by_ref
);
3309 /* Try to do lookup via known virtual table pointer value. */
3310 if (!ie
->indirect_info
->vptr_changed
3311 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3314 unsigned HOST_WIDE_INT offset
;
3315 tree scalar
= (jfunc
->type
== IPA_JF_CONST
) ? ipa_get_jf_constant (jfunc
)
3317 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
, scalar
,
3318 ie
->indirect_info
->offset
,
3320 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3323 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3324 vtable
, offset
, &can_refer
);
3328 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
)
3329 || !possible_polymorphic_call_target_p
3330 (ie
, cgraph_node::get (t
)))
3332 /* Do not speculate builtin_unreachable, it is stupid! */
3333 if (!ie
->indirect_info
->vptr_changed
)
3334 target
= ipa_impossible_devirt_target (ie
, target
);
3341 speculative
= ie
->indirect_info
->vptr_changed
;
3347 ipa_polymorphic_call_context
ie_context (ie
);
3348 vec
<cgraph_node
*>targets
;
3351 ctx
.offset_by (ie
->indirect_info
->offset
);
3352 if (ie
->indirect_info
->vptr_changed
)
3353 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3354 ie
->indirect_info
->otr_type
);
3355 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3356 targets
= possible_polymorphic_call_targets
3357 (ie
->indirect_info
->otr_type
,
3358 ie
->indirect_info
->otr_token
,
3360 if (final
&& targets
.length () <= 1)
3362 speculative
= false;
3363 if (targets
.length () == 1)
3364 target
= targets
[0]->decl
;
3366 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3368 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3369 && !ie
->speculative
&& ie
->maybe_hot_p ())
3372 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3373 ie
->indirect_info
->otr_token
,
3374 ie
->indirect_info
->context
);
3384 if (!possible_polymorphic_call_target_p
3385 (ie
, cgraph_node::get_create (target
)))
3389 target
= ipa_impossible_devirt_target (ie
, target
);
3391 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
3397 /* Update the param called notes associated with NODE when CS is being inlined,
3398 assuming NODE is (potentially indirectly) inlined into CS->callee.
3399 Moreover, if the callee is discovered to be constant, create a new cgraph
3400 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3401 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3404 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3405 struct cgraph_node
*node
,
3406 vec
<cgraph_edge
*> *new_edges
)
3408 class ipa_edge_args
*top
;
3409 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3410 class ipa_node_params
*new_root_info
, *inlined_node_info
;
3413 ipa_check_create_edge_args ();
3414 top
= IPA_EDGE_REF (cs
);
3415 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
3416 ? cs
->caller
->global
.inlined_to
3418 inlined_node_info
= IPA_NODE_REF (cs
->callee
->function_symbol ());
3420 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3422 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3423 struct ipa_jump_func
*jfunc
;
3425 cgraph_node
*spec_target
= NULL
;
3427 next_ie
= ie
->next_callee
;
3429 if (ici
->param_index
== -1)
3432 /* We must check range due to calls with variable number of arguments: */
3433 if (!top
|| ici
->param_index
>= ipa_get_cs_argument_count (top
))
3435 ici
->param_index
= -1;
3439 param_index
= ici
->param_index
;
3440 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3442 if (ie
->speculative
)
3444 struct cgraph_edge
*de
;
3445 struct ipa_ref
*ref
;
3446 ie
->speculative_call_info (de
, ie
, ref
);
3447 spec_target
= de
->callee
;
3450 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3451 new_direct_edge
= NULL
;
3452 else if (ici
->polymorphic
)
3454 ipa_polymorphic_call_context ctx
;
3455 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3456 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
);
3460 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
3461 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3466 /* If speculation was removed, then we need to do nothing. */
3467 if (new_direct_edge
&& new_direct_edge
!= ie
3468 && new_direct_edge
->callee
== spec_target
)
3470 new_direct_edge
->indirect_inlining_edge
= 1;
3471 top
= IPA_EDGE_REF (cs
);
3473 if (!new_direct_edge
->speculative
)
3476 else if (new_direct_edge
)
3478 new_direct_edge
->indirect_inlining_edge
= 1;
3479 if (new_direct_edge
->call_stmt
)
3480 new_direct_edge
->call_stmt_cannot_inline_p
3481 = !gimple_check_call_matching_types (
3482 new_direct_edge
->call_stmt
,
3483 new_direct_edge
->callee
->decl
, false);
3486 new_edges
->safe_push (new_direct_edge
);
3489 top
= IPA_EDGE_REF (cs
);
3490 /* If speculative edge was introduced we still need to update
3491 call info of the indirect edge. */
3492 if (!new_direct_edge
->speculative
)
3495 if (jfunc
->type
== IPA_JF_PASS_THROUGH
3496 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3498 if (ici
->agg_contents
3499 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
3500 && !ici
->polymorphic
)
3501 ici
->param_index
= -1;
3504 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3505 if (ici
->polymorphic
3506 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
3507 ici
->vptr_changed
= true;
3510 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3512 if (ici
->agg_contents
3513 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
3514 && !ici
->polymorphic
)
3515 ici
->param_index
= -1;
3518 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3519 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3520 if (ici
->polymorphic
3521 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
3522 ici
->vptr_changed
= true;
3526 /* Either we can find a destination for this edge now or never. */
3527 ici
->param_index
= -1;
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
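/* For example, if the constant had C described uses in the caller and the
   corresponding parameter had D described uses in the callee, one of the
   caller's counted uses was the call that has just been inlined away, so the
   combined count is C + D - 1; an IPA_UNDESCRIBED_USE on either side keeps
   the result undescribed.  */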
3573 /* Propagate number of controlled users from CS->caleee to the new root of the
3574 tree of inlined nodes. */
3577 propagate_controlled_uses (struct cgraph_edge
*cs
)
3579 class ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3582 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
3583 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
3584 class ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
3585 class ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
3588 count
= MIN (ipa_get_cs_argument_count (args
),
3589 ipa_get_param_count (old_root_info
));
3590 for (i
= 0; i
< count
; i
++)
3592 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3593 struct ipa_cst_ref_desc
*rdesc
;
3595 if (jf
->type
== IPA_JF_PASS_THROUGH
)
3598 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
3599 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
3600 d
= ipa_get_controlled_uses (old_root_info
, i
);
3602 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
3603 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
3604 c
= combine_controlled_uses_counters (c
, d
);
3605 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
3606 if (c
== 0 && new_root_info
->ipcp_orig_node
)
3608 struct cgraph_node
*n
;
3609 struct ipa_ref
*ref
;
3610 tree t
= new_root_info
->known_csts
[src_idx
];
3612 if (t
&& TREE_CODE (t
) == ADDR_EXPR
3613 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
3614 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
3615 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
3618 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
3619 "reference from %s to %s.\n",
3620 new_root
->dump_name (),
3622 ref
->remove_reference ();
3626 else if (jf
->type
== IPA_JF_CONST
3627 && (rdesc
= jfunc_rdesc_usable (jf
)))
3629 int d
= ipa_get_controlled_uses (old_root_info
, i
);
3630 int c
= rdesc
->refcount
;
3631 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
3632 if (rdesc
->refcount
== 0)
3634 tree cst
= ipa_get_jf_constant (jf
);
3635 struct cgraph_node
*n
;
3636 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
3637 && TREE_CODE (TREE_OPERAND (cst
, 0))
3639 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
3642 struct cgraph_node
*clone
;
3644 ok
= remove_described_reference (n
, rdesc
);
3645 gcc_checking_assert (ok
);
3648 while (clone
->global
.inlined_to
3649 && clone
!= rdesc
->cs
->caller
3650 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
3652 struct ipa_ref
*ref
;
3653 ref
= clone
->find_reference (n
, NULL
, 0);
3657 fprintf (dump_file
, "ipa-prop: Removing "
3658 "cloning-created reference "
3660 clone
->dump_name (),
3662 ref
->remove_reference ();
3664 clone
= clone
->callers
->caller
;
3671 for (i
= ipa_get_param_count (old_root_info
);
3672 i
< ipa_get_cs_argument_count (args
);
3675 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3677 if (jf
->type
== IPA_JF_CONST
)
3679 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3681 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3683 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3684 ipa_set_controlled_uses (new_root_info
,
3685 jf
->value
.pass_through
.formal_id
,
3686 IPA_UNDESCRIBED_USE
);
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ipa_edge_args_sum->release ();
  ipa_edge_args_sum = NULL;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  ipa_node_params_sum->release ();
  ipa_node_params_sum = NULL;
}
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
}

/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->release ();
  ipcp_transformation_sum = NULL;
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
3810 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3811 reference count data strucutres accordingly. */
3814 ipa_edge_args_sum_t::duplicate (cgraph_edge
*src
, cgraph_edge
*dst
,
3815 ipa_edge_args
*old_args
, ipa_edge_args
*new_args
)
3819 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3820 if (old_args
->polymorphic_call_contexts
)
3821 new_args
->polymorphic_call_contexts
3822 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
3824 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3826 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3827 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3829 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3831 if (src_jf
->type
== IPA_JF_CONST
)
3833 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3836 dst_jf
->value
.constant
.rdesc
= NULL
;
3837 else if (src
->caller
== dst
->caller
)
3839 struct ipa_ref
*ref
;
3840 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3841 gcc_checking_assert (n
);
3842 ref
= src
->caller
->find_reference (n
, src
->call_stmt
,
3844 gcc_checking_assert (ref
);
3845 dst
->caller
->clone_reference (ref
, ref
->stmt
);
3847 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3848 dst_rdesc
->cs
= dst
;
3849 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3850 dst_rdesc
->next_duplicate
= NULL
;
3851 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3853 else if (src_rdesc
->cs
== src
)
3855 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3856 dst_rdesc
->cs
= dst
;
3857 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3858 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3859 src_rdesc
->next_duplicate
= dst_rdesc
;
3860 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3864 struct ipa_cst_ref_desc
*dst_rdesc
;
3865 /* This can happen during inlining, when a JFUNC can refer to a
3866 reference taken in a function up in the tree of inline clones.
3867 We need to find the duplicate that refers to our tree of
3870 gcc_assert (dst
->caller
->global
.inlined_to
);
3871 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3873 dst_rdesc
= dst_rdesc
->next_duplicate
)
3875 struct cgraph_node
*top
;
3876 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3877 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3878 : dst_rdesc
->cs
->caller
;
3879 if (dst
->caller
->global
.inlined_to
== top
)
3882 gcc_assert (dst_rdesc
);
3883 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3886 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
3887 && src
->caller
== dst
->caller
)
3889 struct cgraph_node
*inline_root
= dst
->caller
->global
.inlined_to
3890 ? dst
->caller
->global
.inlined_to
: dst
->caller
;
3891 class ipa_node_params
*root_info
= IPA_NODE_REF (inline_root
);
3892 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
3894 int c
= ipa_get_controlled_uses (root_info
, idx
);
3895 if (c
!= IPA_UNDESCRIBED_USE
)
3898 ipa_set_controlled_uses (root_info
, idx
, c
);
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
3913 /* Hook that is called by summary when a node is duplicated. */
3916 ipa_node_params_t::duplicate(cgraph_node
*src
, cgraph_node
*dst
,
3917 ipa_node_params
*old_info
,
3918 ipa_node_params
*new_info
)
3920 ipa_agg_replacement_value
*old_av
, *new_av
;
3922 new_info
->descriptors
= vec_safe_copy (old_info
->descriptors
);
3923 new_info
->lattices
= NULL
;
3924 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3925 new_info
->known_csts
= old_info
->known_csts
.copy ();
3926 new_info
->known_contexts
= old_info
->known_contexts
.copy ();
3928 new_info
->analysis_done
= old_info
->analysis_done
;
3929 new_info
->node_enqueued
= old_info
->node_enqueued
;
3930 new_info
->versionable
= old_info
->versionable
;
3932 old_av
= ipa_get_agg_replacements_for_node (src
);
3938 struct ipa_agg_replacement_value
*v
;
3940 v
= ggc_alloc
<ipa_agg_replacement_value
> ();
3941 memcpy (v
, old_av
, sizeof (*v
));
3944 old_av
= old_av
->next
;
3946 ipa_set_node_agg_value_chain (dst
, new_av
);
3949 ipcp_transformation
*src_trans
= ipcp_get_transformation_summary (src
);
3953 ipcp_transformation_initialize ();
3954 src_trans
= ipcp_transformation_sum
->get_create (src
);
3955 ipcp_transformation
*dst_trans
3956 = ipcp_transformation_sum
->get_create (dst
);
3958 dst_trans
->bits
= vec_safe_copy (src_trans
->bits
);
3960 const vec
<ipa_vr
, va_gc
> *src_vr
= src_trans
->m_vr
;
3961 vec
<ipa_vr
, va_gc
> *&dst_vr
3962 = ipcp_get_transformation_summary (dst
)->m_vr
;
3963 if (vec_safe_length (src_trans
->m_vr
) > 0)
3965 vec_safe_reserve_exact (dst_vr
, src_vr
->length ());
3966 for (unsigned i
= 0; i
< src_vr
->length (); ++i
)
3967 dst_vr
->quick_push ((*src_vr
)[i
]);
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function %s parameter descriptors:\n", node->dump_name ());
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;

  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
4089 /* Stream out jump function JUMP_FUNC to OB. */
4092 ipa_write_jump_function (struct output_block
*ob
,
4093 struct ipa_jump_func
*jump_func
)
4095 struct ipa_agg_jf_item
*item
;
4096 struct bitpack_d bp
;
4100 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4101 as well as WPA memory by handling them specially. */
4102 if (jump_func
->type
== IPA_JF_CONST
4103 && TREE_CODE (jump_func
->value
.constant
.value
) == ADDR_EXPR
)
4106 streamer_write_uhwi (ob
, jump_func
->type
* 2 + flag
);
4107 switch (jump_func
->type
)
4109 case IPA_JF_UNKNOWN
:
4113 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4114 stream_write_tree (ob
,
4116 ? TREE_OPERAND (jump_func
->value
.constant
.value
, 0)
4117 : jump_func
->value
.constant
.value
, true);
4119 case IPA_JF_PASS_THROUGH
:
4120 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4121 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4123 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4124 bp
= bitpack_create (ob
->main_stream
);
4125 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4126 streamer_write_bitpack (&bp
);
4128 else if (TREE_CODE_CLASS (jump_func
->value
.pass_through
.operation
)
4130 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4133 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4134 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4137 case IPA_JF_ANCESTOR
:
4138 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4139 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4140 bp
= bitpack_create (ob
->main_stream
);
4141 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4142 streamer_write_bitpack (&bp
);
4146 count
= vec_safe_length (jump_func
->agg
.items
);
4147 streamer_write_uhwi (ob
, count
);
4150 bp
= bitpack_create (ob
->main_stream
);
4151 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4152 streamer_write_bitpack (&bp
);
4155 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4157 streamer_write_uhwi (ob
, item
->offset
);
4158 stream_write_tree (ob
, item
->value
, true);
4161 bp
= bitpack_create (ob
->main_stream
);
4162 bp_pack_value (&bp
, !!jump_func
->bits
, 1);
4163 streamer_write_bitpack (&bp
);
4164 if (jump_func
->bits
)
4166 streamer_write_widest_int (ob
, jump_func
->bits
->value
);
4167 streamer_write_widest_int (ob
, jump_func
->bits
->mask
);
4169 bp_pack_value (&bp
, !!jump_func
->m_vr
, 1);
4170 streamer_write_bitpack (&bp
);
4171 if (jump_func
->m_vr
)
4173 streamer_write_enum (ob
->main_stream
, value_rang_type
,
4174 VR_LAST
, jump_func
->m_vr
->kind ());
4175 stream_write_tree (ob
, jump_func
->m_vr
->min (), true);
4176 stream_write_tree (ob
, jump_func
->m_vr
->max (), true);
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build_fold_addr_expr (t);
	ipa_set_jf_constant (jump_func, t, cs);
	break;
      }
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
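/* The offset is only streamed when it can be meaningful, i.e. for aggregate
   contents or polymorphic calls; otherwise the writer above asserts that it
   is zero and the reader below simply restores zero.  */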
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
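/* Each edge record written above starts with a single uhwi: zero when the
   edge has no ipa_edge_args summary, otherwise the argument count times two
   with the low bit flagging the presence of polymorphic call contexts.
   ipa_read_edge_info below decodes the same encoding.  */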
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e, data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (!prevails)
	continue;

      ipa_set_controlled_uses (info, k, nuses);
      (*info->descriptors)[k].decl_or_type = type;
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
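/* The order of reads above mirrors ipa_write_node_info exactly: parameter
   count and move costs, the used-parameter bitpack, per-parameter controlled
   uses and types, then one record per callee and per indirect edge.  The
   data is consumed even when the node does not prevail so that the input
   block stays in sync.  */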
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_section_data (file_data, LTO_section_jump_functions,
				NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
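/* Stream out the IPA-CP transformation summary for NODE to OB: the chain of
   aggregate replacement values, then the per-parameter value ranges and the
   per-parameter known-bits information (a zero count is written whenever a
   summary is absent).  */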
static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
					      streamer_read_widest_int (ib));
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;

  if (!node->clone.param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  node->clone.param_adjustments->get_updated_indices (&new_indices);
  for (v = aggval; v; v = v->next)
    {
      gcc_checking_assert (v->index >= 0);

      if ((unsigned) v->index < new_indices.length ())
	v->index = new_indices[v->index];
      else
	/* This can happen if we know about a constant passed by reference by
	   an argument which is never actually used for anything, let alone
	   loading that constant.  */
	v->index = -1;
    }
}
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
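/* Scan the statements of BB and replace loads from aggregates passed in
   parameters with the constant values recorded in m_aggval, as far as types
   and sizes allow.  Sets *m_something_changed when a statement is modified
   and *m_cfg_changed when EH cleanup removes edges.  */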
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec<tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  /* Bits that are unknown (mask) or known to be one (value) may be
	     non-zero.  */
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  /* The least significant set bit of the mask is the first bit whose
	     value is unknown; all lower bits are known and so determine the
	     alignment and misalignment of the pointer.  */
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);
	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
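/* Return true if the value range recorded for a parameter of EXPR_TYPE
   excludes zero, i.e. it is the anti-range ~[0, 0] or, for an unsigned type,
   the range [1, TYPE_MAX].  */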
bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
	  && TYPE_UNSIGNED (expr_type)
	  && wi::eq_p (min, wi::one (prec))
	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}
/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec<tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"