1 /* Interprocedural analyses.
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t
*ipa_node_params_sum
= NULL
;
59 function_summary
<ipcp_transformation
*> *ipcp_transformation_sum
= NULL
;
61 /* Edge summary for IPA-CP edge information. */
62 ipa_edge_args_sum_t
*ipa_edge_args_sum
;
64 /* Traits for a hash table for reusing already existing ipa_bits. */
66 struct ipa_bit_ggc_hash_traits
: public ggc_cache_remove
<ipa_bits
*>
68 typedef ipa_bits
*value_type
;
69 typedef ipa_bits
*compare_type
;
71 hash (const ipa_bits
*p
)
73 hashval_t t
= (hashval_t
) p
->value
.to_shwi ();
74 return iterative_hash_host_wide_int (p
->mask
.to_shwi (), t
);
77 equal (const ipa_bits
*a
, const ipa_bits
*b
)
79 return a
->value
== b
->value
&& a
->mask
== b
->mask
;
82 mark_empty (ipa_bits
*&p
)
87 is_empty (const ipa_bits
*p
)
92 is_deleted (const ipa_bits
*p
)
94 return p
== reinterpret_cast<const ipa_bits
*> (1);
97 mark_deleted (ipa_bits
*&p
)
99 p
= reinterpret_cast<ipa_bits
*> (1);
103 /* Hash table for avoid repeated allocations of equal ipa_bits. */
104 static GTY ((cache
)) hash_table
<ipa_bit_ggc_hash_traits
> *ipa_bits_hash_table
;
106 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
107 the equiv bitmap is not hashed and is expected to be NULL. */
109 struct ipa_vr_ggc_hash_traits
: public ggc_cache_remove
<value_range
*>
111 typedef value_range
*value_type
;
112 typedef value_range
*compare_type
;
114 hash (const value_range
*p
)
116 inchash::hash
hstate (p
->kind ());
117 inchash::add_expr (p
->min (), hstate
);
118 inchash::add_expr (p
->max (), hstate
);
119 return hstate
.end ();
122 equal (const value_range
*a
, const value_range
*b
)
124 return a
->equal_p (*b
);
127 mark_empty (value_range
*&p
)
132 is_empty (const value_range
*p
)
137 is_deleted (const value_range
*p
)
139 return p
== reinterpret_cast<const value_range
*> (1);
142 mark_deleted (value_range
*&p
)
144 p
= reinterpret_cast<value_range
*> (1);
148 /* Hash table for avoid repeated allocations of equal value_ranges. */
149 static GTY ((cache
)) hash_table
<ipa_vr_ggc_hash_traits
> *ipa_vr_hash_table
;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
154 /* Description of a reference to an IPA constant. */
155 struct ipa_cst_ref_desc
157 /* Edge that corresponds to the statement which took the reference. */
158 struct cgraph_edge
*cs
;
159 /* Linked list of duplicates created when call graph edges are cloned. */
160 struct ipa_cst_ref_desc
*next_duplicate
;
161 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
162 if out of control. */
166 /* Allocation pool for reference descriptions. */
168 static object_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
169 ("IPA-PROP ref descriptions");
171 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
172 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
175 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
177 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
181 return !opt_for_fn (node
->decl
, optimize
) || !opt_for_fn (node
->decl
, flag_ipa_cp
);
184 /* Return index of the formal whose tree is PTREE in function which corresponds
188 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
193 count
= vec_safe_length (descriptors
);
194 for (i
= 0; i
< count
; i
++)
195 if ((*descriptors
)[i
].decl_or_type
== ptree
)
201 /* Return index of the formal whose tree is PTREE in function which corresponds
205 ipa_get_param_decl_index (class ipa_node_params
*info
, tree ptree
)
207 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
210 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
214 ipa_populate_param_decls (struct cgraph_node
*node
,
215 vec
<ipa_param_descriptor
, va_gc
> &descriptors
)
223 gcc_assert (gimple_has_body_p (fndecl
));
224 fnargs
= DECL_ARGUMENTS (fndecl
);
226 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
228 descriptors
[param_num
].decl_or_type
= parm
;
229 unsigned int cost
= estimate_move_cost (TREE_TYPE (parm
), true);
230 descriptors
[param_num
].move_cost
= cost
;
231 /* Watch overflow, move_cost is a bitfield. */
232 gcc_checking_assert (cost
== descriptors
[param_num
].move_cost
);
237 /* Return how many formal parameters FNDECL has. */
240 count_formal_params (tree fndecl
)
244 gcc_assert (gimple_has_body_p (fndecl
));
246 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
252 /* Return the declaration of Ith formal parameter of the function corresponding
253 to INFO. Note there is no setter function as this array is built just once
254 using ipa_initialize_node_params. */
257 ipa_dump_param (FILE *file
, class ipa_node_params
*info
, int i
)
259 fprintf (file
, "param #%i", i
);
260 if ((*info
->descriptors
)[i
].decl_or_type
)
263 print_generic_expr (file
, (*info
->descriptors
)[i
].decl_or_type
);
267 /* If necessary, allocate vector of parameter descriptors in info of NODE.
268 Return true if they were allocated, false if not. */
271 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
273 class ipa_node_params
*info
= IPA_NODE_REF_GET_CREATE (node
);
275 if (!info
->descriptors
&& param_count
)
277 vec_safe_grow_cleared (info
->descriptors
, param_count
);
284 /* Initialize the ipa_node_params structure associated with NODE by counting
285 the function parameters, creating the descriptors and populating their
289 ipa_initialize_node_params (struct cgraph_node
*node
)
291 class ipa_node_params
*info
= IPA_NODE_REF_GET_CREATE (node
);
293 if (!info
->descriptors
294 && ipa_alloc_node_params (node
, count_formal_params (node
->decl
)))
295 ipa_populate_param_decls (node
, *info
->descriptors
);
298 /* Print the jump functions associated with call graph edge CS to file F. */
301 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
305 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
306 for (i
= 0; i
< count
; i
++)
308 struct ipa_jump_func
*jump_func
;
309 enum jump_func_type type
;
311 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
312 type
= jump_func
->type
;
314 fprintf (f
, " param %d: ", i
);
315 if (type
== IPA_JF_UNKNOWN
)
316 fprintf (f
, "UNKNOWN\n");
317 else if (type
== IPA_JF_CONST
)
319 tree val
= jump_func
->value
.constant
.value
;
320 fprintf (f
, "CONST: ");
321 print_generic_expr (f
, val
);
322 if (TREE_CODE (val
) == ADDR_EXPR
323 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
326 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)));
330 else if (type
== IPA_JF_PASS_THROUGH
)
332 fprintf (f
, "PASS THROUGH: ");
333 fprintf (f
, "%d, op %s",
334 jump_func
->value
.pass_through
.formal_id
,
335 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
336 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
339 print_generic_expr (f
, jump_func
->value
.pass_through
.operand
);
341 if (jump_func
->value
.pass_through
.agg_preserved
)
342 fprintf (f
, ", agg_preserved");
345 else if (type
== IPA_JF_ANCESTOR
)
347 fprintf (f
, "ANCESTOR: ");
348 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
349 jump_func
->value
.ancestor
.formal_id
,
350 jump_func
->value
.ancestor
.offset
);
351 if (jump_func
->value
.ancestor
.agg_preserved
)
352 fprintf (f
, ", agg_preserved");
356 if (jump_func
->agg
.items
)
358 struct ipa_agg_jf_item
*item
;
361 fprintf (f
, " Aggregate passed by %s:\n",
362 jump_func
->agg
.by_ref
? "reference" : "value");
363 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
365 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
367 if (TYPE_P (item
->value
))
368 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
369 tree_to_uhwi (TYPE_SIZE (item
->value
)));
372 fprintf (f
, "cst: ");
373 print_generic_expr (f
, item
->value
);
379 class ipa_polymorphic_call_context
*ctx
380 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs
), i
);
381 if (ctx
&& !ctx
->useless_p ())
383 fprintf (f
, " Context: ");
384 ctx
->dump (dump_file
);
389 fprintf (f
, " value: ");
390 print_hex (jump_func
->bits
->value
, f
);
391 fprintf (f
, ", mask: ");
392 print_hex (jump_func
->bits
->mask
, f
);
396 fprintf (f
, " Unknown bits\n");
402 (jump_func
->m_vr
->kind () == VR_ANTI_RANGE
) ? "~" : "");
403 print_decs (wi::to_wide (jump_func
->m_vr
->min ()), f
);
405 print_decs (wi::to_wide (jump_func
->m_vr
->max ()), f
);
409 fprintf (f
, " Unknown VR\n");
414 /* Print the jump functions of all arguments on all call graph edges going from
418 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
420 struct cgraph_edge
*cs
;
422 fprintf (f
, " Jump functions of caller %s:\n", node
->dump_name ());
423 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
426 fprintf (f
, " callsite %s -> %s : \n",
428 cs
->callee
->dump_name ());
429 if (!ipa_edge_args_info_available_for_edge_p (cs
))
430 fprintf (f
, " no arg info\n");
432 ipa_print_node_jump_functions_for_edge (f
, cs
);
435 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
437 class cgraph_indirect_call_info
*ii
;
439 ii
= cs
->indirect_info
;
440 if (ii
->agg_contents
)
441 fprintf (f
, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
443 ii
->member_ptr
? "member ptr" : "aggregate",
444 ii
->param_index
, ii
->offset
,
445 ii
->by_ref
? "by reference" : "by_value");
447 fprintf (f
, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC
,
449 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
454 fprintf (f
, ", for stmt ");
455 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
460 ii
->context
.dump (f
);
461 if (!ipa_edge_args_info_available_for_edge_p (cs
))
462 fprintf (f
, " no arg info\n");
464 ipa_print_node_jump_functions_for_edge (f
, cs
);
468 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
471 ipa_print_all_jump_functions (FILE *f
)
473 struct cgraph_node
*node
;
475 fprintf (f
, "\nJump functions:\n");
476 FOR_EACH_FUNCTION (node
)
478 ipa_print_node_jump_functions (f
, node
);
482 /* Set jfunc to be a know-really nothing jump function. */
485 ipa_set_jf_unknown (struct ipa_jump_func
*jfunc
)
487 jfunc
->type
= IPA_JF_UNKNOWN
;
492 /* Set JFUNC to be a copy of another jmp (to be used by jump function
493 combination code). The two functions will share their rdesc. */
496 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
497 struct ipa_jump_func
*src
)
500 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
501 dst
->type
= IPA_JF_CONST
;
502 dst
->value
.constant
= src
->value
.constant
;
505 /* Set JFUNC to be a constant jmp function. */
508 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
509 struct cgraph_edge
*cs
)
511 jfunc
->type
= IPA_JF_CONST
;
512 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
514 if (TREE_CODE (constant
) == ADDR_EXPR
515 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
517 struct ipa_cst_ref_desc
*rdesc
;
519 rdesc
= ipa_refdesc_pool
.allocate ();
521 rdesc
->next_duplicate
= NULL
;
523 jfunc
->value
.constant
.rdesc
= rdesc
;
526 jfunc
->value
.constant
.rdesc
= NULL
;
529 /* Set JFUNC to be a simple pass-through jump function. */
531 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
534 jfunc
->type
= IPA_JF_PASS_THROUGH
;
535 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
536 jfunc
->value
.pass_through
.formal_id
= formal_id
;
537 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
538 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
541 /* Set JFUNC to be an unary pass through jump function. */
544 ipa_set_jf_unary_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
545 enum tree_code operation
)
547 jfunc
->type
= IPA_JF_PASS_THROUGH
;
548 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
549 jfunc
->value
.pass_through
.formal_id
= formal_id
;
550 jfunc
->value
.pass_through
.operation
= operation
;
551 jfunc
->value
.pass_through
.agg_preserved
= false;
553 /* Set JFUNC to be an arithmetic pass through jump function. */
556 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
557 tree operand
, enum tree_code operation
)
559 jfunc
->type
= IPA_JF_PASS_THROUGH
;
560 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
561 jfunc
->value
.pass_through
.formal_id
= formal_id
;
562 jfunc
->value
.pass_through
.operation
= operation
;
563 jfunc
->value
.pass_through
.agg_preserved
= false;
566 /* Set JFUNC to be an ancestor jump function. */
569 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
570 int formal_id
, bool agg_preserved
)
572 jfunc
->type
= IPA_JF_ANCESTOR
;
573 jfunc
->value
.ancestor
.formal_id
= formal_id
;
574 jfunc
->value
.ancestor
.offset
= offset
;
575 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
578 /* Get IPA BB information about the given BB. FBI is the context of analyzis
579 of this function body. */
581 static struct ipa_bb_info
*
582 ipa_get_bb_info (struct ipa_func_body_info
*fbi
, basic_block bb
)
584 gcc_checking_assert (fbi
);
585 return &fbi
->bb_infos
[bb
->index
];
588 /* Structure to be passed in between detect_type_change and
589 check_stmt_for_type_change. */
591 struct prop_type_change_info
593 /* Offset into the object where there is the virtual method pointer we are
595 HOST_WIDE_INT offset
;
596 /* The declaration or SSA_NAME pointer of the base that we are checking for
599 /* Set to true if dynamic type change has been detected. */
600 bool type_maybe_changed
;
603 /* Return true if STMT can modify a virtual method table pointer.
605 This function makes special assumptions about both constructors and
606 destructors which are all the functions that are allowed to alter the VMT
607 pointers. It assumes that destructors begin with assignment into all VMT
608 pointers and that constructors essentially look in the following way:
610 1) The very first thing they do is that they call constructors of ancestor
611 sub-objects that have them.
613 2) Then VMT pointers of this and all its ancestors is set to new values
614 corresponding to the type corresponding to the constructor.
616 3) Only afterwards, other stuff such as constructor of member sub-objects
617 and the code written by the user is run. Only this may include calling
618 virtual functions, directly or indirectly.
620 There is no way to call a constructor of an ancestor sub-object in any
623 This means that we do not have to care whether constructors get the correct
624 type information because they will always change it (in fact, if we define
625 the type to be given by the VMT pointer, it is undefined).
627 The most important fact to derive from the above is that if, for some
628 statement in the section 3, we try to detect whether the dynamic type has
629 changed, we can safely ignore all calls as we examine the function body
630 backwards until we reach statements in section 2 because these calls cannot
631 be ancestor constructors or destructors (if the input is not bogus) and so
632 do not change the dynamic type (this holds true only for automatically
633 allocated objects but at the moment we devirtualize only these). We then
634 must detect that statements in section 2 change the dynamic type and can try
635 to derive the new type. That is enough and we can stop, we will never see
636 the calls into constructors of sub-objects in this code. Therefore we can
637 safely ignore all call statements that we traverse.
641 stmt_may_be_vtbl_ptr_store (gimple
*stmt
)
643 if (is_gimple_call (stmt
))
645 if (gimple_clobber_p (stmt
))
647 else if (is_gimple_assign (stmt
))
649 tree lhs
= gimple_assign_lhs (stmt
);
651 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
653 if (flag_strict_aliasing
654 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
657 if (TREE_CODE (lhs
) == COMPONENT_REF
658 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
660 /* In the future we might want to use get_ref_base_and_extent to find
661 if there is a field corresponding to the offset and if so, proceed
662 almost like if it was a component ref. */
668 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
669 to check whether a particular statement may modify the virtual table
670 pointerIt stores its result into DATA, which points to a
671 prop_type_change_info structure. */
674 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
676 gimple
*stmt
= SSA_NAME_DEF_STMT (vdef
);
677 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
679 if (stmt_may_be_vtbl_ptr_store (stmt
))
681 tci
->type_maybe_changed
= true;
688 /* See if ARG is PARAM_DECl describing instance passed by pointer
689 or reference in FUNCTION. Return false if the dynamic type may change
690 in between beggining of the function until CALL is invoked.
692 Generally functions are not allowed to change type of such instances,
693 but they call destructors. We assume that methods cannot destroy the THIS
694 pointer. Also as a special cases, constructor and destructors may change
695 type of the THIS pointer. */
698 param_type_may_change_p (tree function
, tree arg
, gimple
*call
)
700 /* Pure functions cannot do any changes on the dynamic type;
701 that require writting to memory. */
702 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
704 /* We need to check if we are within inlined consturctor
705 or destructor (ideally we would have way to check that the
706 inline cdtor is actually working on ARG, but we don't have
707 easy tie on this, so punt on all non-pure cdtors.
708 We may also record the types of cdtors and once we know type
709 of the instance match them.
711 Also code unification optimizations may merge calls from
712 different blocks making return values unreliable. So
713 do nothing during late optimization. */
714 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
716 if (TREE_CODE (arg
) == SSA_NAME
717 && SSA_NAME_IS_DEFAULT_DEF (arg
)
718 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
720 /* Normal (non-THIS) argument. */
721 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
722 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
723 /* THIS pointer of an method - here we want to watch constructors
724 and destructors as those definitely may change the dynamic
726 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
727 && !DECL_CXX_CONSTRUCTOR_P (function
)
728 && !DECL_CXX_DESTRUCTOR_P (function
)
729 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
731 /* Walk the inline stack and watch out for ctors/dtors. */
732 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
733 block
= BLOCK_SUPERCONTEXT (block
))
734 if (inlined_polymorphic_ctor_dtor_block_p (block
, false))
742 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
743 callsite CALL) by looking for assignments to its virtual table pointer. If
744 it is, return true and fill in the jump function JFUNC with relevant type
745 information or set it to unknown. ARG is the object itself (not a pointer
746 to it, unless dereferenced). BASE is the base of the memory access as
747 returned by get_ref_base_and_extent, as is the offset.
749 This is helper function for detect_type_change and detect_type_change_ssa
750 that does the heavy work which is usually unnecesary. */
753 detect_type_change_from_memory_writes (ipa_func_body_info
*fbi
, tree arg
,
754 tree base
, tree comp_type
, gcall
*call
,
755 struct ipa_jump_func
*jfunc
,
756 HOST_WIDE_INT offset
)
758 struct prop_type_change_info tci
;
761 gcc_checking_assert (DECL_P (arg
)
762 || TREE_CODE (arg
) == MEM_REF
763 || handled_component_p (arg
));
765 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
767 /* Const calls cannot call virtual methods through VMT and so type changes do
769 if (!flag_devirtualize
|| !gimple_vuse (call
)
770 /* Be sure expected_type is polymorphic. */
772 || TREE_CODE (comp_type
) != RECORD_TYPE
773 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
774 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
777 ao_ref_init (&ao
, arg
);
780 ao
.size
= POINTER_SIZE
;
781 ao
.max_size
= ao
.size
;
784 tci
.object
= get_base_address (arg
);
785 tci
.type_maybe_changed
= false;
788 = walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
789 &tci
, NULL
, NULL
, fbi
->aa_walk_budget
+ 1);
791 if (walked
>= 0 && !tci
.type_maybe_changed
)
794 ipa_set_jf_unknown (jfunc
);
798 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
799 If it is, return true and fill in the jump function JFUNC with relevant type
800 information or set it to unknown. ARG is the object itself (not a pointer
801 to it, unless dereferenced). BASE is the base of the memory access as
802 returned by get_ref_base_and_extent, as is the offset. */
805 detect_type_change (ipa_func_body_info
*fbi
, tree arg
, tree base
,
806 tree comp_type
, gcall
*call
, struct ipa_jump_func
*jfunc
,
807 HOST_WIDE_INT offset
)
809 if (!flag_devirtualize
)
812 if (TREE_CODE (base
) == MEM_REF
813 && !param_type_may_change_p (current_function_decl
,
814 TREE_OPERAND (base
, 0),
817 return detect_type_change_from_memory_writes (fbi
, arg
, base
, comp_type
,
818 call
, jfunc
, offset
);
821 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
822 SSA name (its dereference will become the base and the offset is assumed to
826 detect_type_change_ssa (ipa_func_body_info
*fbi
, tree arg
, tree comp_type
,
827 gcall
*call
, struct ipa_jump_func
*jfunc
)
829 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
830 if (!flag_devirtualize
831 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
834 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
837 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
838 build_int_cst (ptr_type_node
, 0));
840 return detect_type_change_from_memory_writes (fbi
, arg
, arg
, comp_type
,
844 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
845 boolean variable pointed to by DATA. */
848 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
851 bool *b
= (bool *) data
;
856 /* Find the nearest valid aa status for parameter specified by INDEX that
859 static struct ipa_param_aa_status
*
860 find_dominating_aa_status (struct ipa_func_body_info
*fbi
, basic_block bb
,
865 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
868 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
869 if (!bi
->param_aa_statuses
.is_empty ()
870 && bi
->param_aa_statuses
[index
].valid
)
871 return &bi
->param_aa_statuses
[index
];
875 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
876 structures and/or intialize the result with a dominating description as
879 static struct ipa_param_aa_status
*
880 parm_bb_aa_status_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
,
883 gcc_checking_assert (fbi
);
884 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
885 if (bi
->param_aa_statuses
.is_empty ())
886 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
);
887 struct ipa_param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
890 gcc_checking_assert (!paa
->parm_modified
891 && !paa
->ref_modified
892 && !paa
->pt_modified
);
893 struct ipa_param_aa_status
*dom_paa
;
894 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
904 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
905 a value known not to be modified in this function before reaching the
906 statement STMT. FBI holds information about the function we have so far
907 gathered but do not survive the summary building stage. */
910 parm_preserved_before_stmt_p (struct ipa_func_body_info
*fbi
, int index
,
911 gimple
*stmt
, tree parm_load
)
913 struct ipa_param_aa_status
*paa
;
914 bool modified
= false;
917 tree base
= get_base_address (parm_load
);
918 gcc_assert (TREE_CODE (base
) == PARM_DECL
);
919 if (TREE_READONLY (base
))
922 gcc_checking_assert (fbi
);
923 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
924 if (paa
->parm_modified
)
927 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
928 ao_ref_init (&refd
, parm_load
);
929 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
930 &modified
, NULL
, NULL
,
931 fbi
->aa_walk_budget
+ 1);
936 fbi
->aa_walk_budget
= 0;
939 fbi
->aa_walk_budget
-= walked
;
941 paa
->parm_modified
= true;
945 /* If STMT is an assignment that loads a value from an parameter declaration,
946 return the index of the parameter in ipa_node_params which has not been
947 modified. Otherwise return -1. */
950 load_from_unmodified_param (struct ipa_func_body_info
*fbi
,
951 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
957 if (!gimple_assign_single_p (stmt
))
960 op1
= gimple_assign_rhs1 (stmt
);
961 if (TREE_CODE (op1
) != PARM_DECL
)
964 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
966 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
972 /* Return true if memory reference REF (which must be a load through parameter
973 with INDEX) loads data that are known to be unmodified in this function
974 before reaching statement STMT. */
977 parm_ref_data_preserved_p (struct ipa_func_body_info
*fbi
,
978 int index
, gimple
*stmt
, tree ref
)
980 struct ipa_param_aa_status
*paa
;
981 bool modified
= false;
984 gcc_checking_assert (fbi
);
985 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
986 if (paa
->ref_modified
)
989 gcc_checking_assert (gimple_vuse (stmt
));
990 ao_ref_init (&refd
, ref
);
991 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
992 &modified
, NULL
, NULL
,
993 fbi
->aa_walk_budget
+ 1);
997 fbi
->aa_walk_budget
= 0;
1000 fbi
->aa_walk_budget
-= walked
;
1002 paa
->ref_modified
= true;
1006 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1007 is known to be unmodified in this function before reaching call statement
1008 CALL into which it is passed. FBI describes the function body. */
1011 parm_ref_data_pass_through_p (struct ipa_func_body_info
*fbi
, int index
,
1012 gimple
*call
, tree parm
)
1014 bool modified
= false;
1017 /* It's unnecessary to calculate anything about memory contnets for a const
1018 function because it is not goin to use it. But do not cache the result
1019 either. Also, no such calculations for non-pointers. */
1020 if (!gimple_vuse (call
)
1021 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1024 struct ipa_param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
,
1027 if (paa
->pt_modified
)
1030 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1031 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1032 &modified
, NULL
, NULL
,
1033 fbi
->aa_walk_budget
+ 1);
1036 fbi
->aa_walk_budget
= 0;
1040 fbi
->aa_walk_budget
-= walked
;
1042 paa
->pt_modified
= true;
1046 /* Return true if we can prove that OP is a memory reference loading
1047 data from an aggregate passed as a parameter.
1049 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1050 false if it cannot prove that the value has not been modified before the
1051 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1052 if it cannot prove the value has not been modified, in that case it will
1053 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1055 INFO and PARMS_AINFO describe parameters of the current function (but the
1056 latter can be NULL), STMT is the load statement. If function returns true,
1057 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1058 within the aggregate and whether it is a load from a value passed by
1059 reference respectively. */
1062 ipa_load_from_parm_agg (struct ipa_func_body_info
*fbi
,
1063 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1064 gimple
*stmt
, tree op
, int *index_p
,
1065 HOST_WIDE_INT
*offset_p
, poly_int64
*size_p
,
1066 bool *by_ref_p
, bool *guaranteed_unmodified
)
1071 tree base
= get_ref_base_and_extent_hwi (op
, offset_p
, &size
, &reverse
);
1078 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1080 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1086 if (guaranteed_unmodified
)
1087 *guaranteed_unmodified
= true;
1093 if (TREE_CODE (base
) != MEM_REF
1094 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1095 || !integer_zerop (TREE_OPERAND (base
, 1)))
1098 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1100 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1101 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1105 /* This branch catches situations where a pointer parameter is not a
1106 gimple register, for example:
1108 void hip7(S*) (struct S * p)
1110 void (*<T2e4>) (struct S *) D.1867;
1115 D.1867_2 = p.1_1->f;
1120 gimple
*def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1121 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1126 bool data_preserved
= parm_ref_data_preserved_p (fbi
, index
, stmt
, op
);
1127 if (!data_preserved
&& !guaranteed_unmodified
)
1134 if (guaranteed_unmodified
)
1135 *guaranteed_unmodified
= data_preserved
;
1141 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1142 of an assignment statement STMT, try to determine whether we are actually
1143 handling any of the following cases and construct an appropriate jump
1144 function into JFUNC if so:
1146 1) The passed value is loaded from a formal parameter which is not a gimple
1147 register (most probably because it is addressable, the value has to be
1148 scalar) and we can guarantee the value has not changed. This case can
1149 therefore be described by a simple pass-through jump function. For example:
1158 2) The passed value can be described by a simple arithmetic pass-through
1165 D.2064_4 = a.1(D) + 4;
1168 This case can also occur in combination of the previous one, e.g.:
1176 D.2064_4 = a.0_3 + 4;
1179 3) The passed value is an address of an object within another one (which
1180 also passed by reference). Such situations are described by an ancestor
1181 jump function and describe situations such as:
1183 B::foo() (struct B * const this)
1187 D.1845_2 = &this_1(D)->D.1748;
1190 INFO is the structure describing individual parameters access different
1191 stages of IPA optimizations. PARMS_AINFO contains the information that is
1192 only needed for intraprocedural analysis. */
1195 compute_complex_assign_jump_func (struct ipa_func_body_info
*fbi
,
1196 class ipa_node_params
*info
,
1197 struct ipa_jump_func
*jfunc
,
1198 gcall
*call
, gimple
*stmt
, tree name
,
1201 HOST_WIDE_INT offset
, size
;
1202 tree op1
, tc_ssa
, base
, ssa
;
1206 op1
= gimple_assign_rhs1 (stmt
);
1208 if (TREE_CODE (op1
) == SSA_NAME
)
1210 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1211 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1213 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1214 SSA_NAME_DEF_STMT (op1
));
1219 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1220 tc_ssa
= gimple_assign_lhs (stmt
);
1225 switch (gimple_assign_rhs_class (stmt
))
1227 case GIMPLE_BINARY_RHS
:
1229 tree op2
= gimple_assign_rhs2 (stmt
);
1230 if (!is_gimple_ip_invariant (op2
)
1231 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
))
1233 && !useless_type_conversion_p (TREE_TYPE (name
),
1237 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1238 gimple_assign_rhs_code (stmt
));
1241 case GIMPLE_SINGLE_RHS
:
1243 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
,
1245 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1248 case GIMPLE_UNARY_RHS
:
1249 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)))
1250 ipa_set_jf_unary_pass_through (jfunc
, index
,
1251 gimple_assign_rhs_code (stmt
));
1257 if (TREE_CODE (op1
) != ADDR_EXPR
)
1259 op1
= TREE_OPERAND (op1
, 0);
1260 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1262 base
= get_ref_base_and_extent_hwi (op1
, &offset
, &size
, &reverse
);
1263 offset_int mem_offset
;
1265 || TREE_CODE (base
) != MEM_REF
1266 || !mem_ref_offset (base
).is_constant (&mem_offset
))
1268 offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1269 ssa
= TREE_OPERAND (base
, 0);
1270 if (TREE_CODE (ssa
) != SSA_NAME
1271 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1275 /* Dynamic types are changed in constructors and destructors. */
1276 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1277 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1278 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1279 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
));
1282 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1285 iftmp.1_3 = &obj_2(D)->D.1762;
1287 The base of the MEM_REF must be a default definition SSA NAME of a
1288 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1289 whole MEM_REF expression is returned and the offset calculated from any
1290 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1291 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1294 get_ancestor_addr_info (gimple
*assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1297 tree expr
, parm
, obj
;
1300 if (!gimple_assign_single_p (assign
))
1302 expr
= gimple_assign_rhs1 (assign
);
1304 if (TREE_CODE (expr
) != ADDR_EXPR
)
1306 expr
= TREE_OPERAND (expr
, 0);
1308 expr
= get_ref_base_and_extent_hwi (expr
, offset
, &size
, &reverse
);
1310 offset_int mem_offset
;
1312 || TREE_CODE (expr
) != MEM_REF
1313 || !mem_ref_offset (expr
).is_constant (&mem_offset
))
1315 parm
= TREE_OPERAND (expr
, 0);
1316 if (TREE_CODE (parm
) != SSA_NAME
1317 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1318 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1321 *offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1327 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1328 statement PHI, try to find out whether NAME is in fact a
1329 multiple-inheritance typecast from a descendant into an ancestor of a formal
1330 parameter and thus can be described by an ancestor jump function and if so,
1331 write the appropriate function into JFUNC.
1333 Essentially we want to match the following pattern:
1341 iftmp.1_3 = &obj_2(D)->D.1762;
1344 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1345 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1349 compute_complex_ancestor_jump_func (struct ipa_func_body_info
*fbi
,
1350 class ipa_node_params
*info
,
1351 struct ipa_jump_func
*jfunc
,
1352 gcall
*call
, gphi
*phi
)
1354 HOST_WIDE_INT offset
;
1355 gimple
*assign
, *cond
;
1356 basic_block phi_bb
, assign_bb
, cond_bb
;
1357 tree tmp
, parm
, expr
, obj
;
1360 if (gimple_phi_num_args (phi
) != 2)
1363 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1364 tmp
= PHI_ARG_DEF (phi
, 0);
1365 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1366 tmp
= PHI_ARG_DEF (phi
, 1);
1369 if (TREE_CODE (tmp
) != SSA_NAME
1370 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1371 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1372 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1375 assign
= SSA_NAME_DEF_STMT (tmp
);
1376 assign_bb
= gimple_bb (assign
);
1377 if (!single_pred_p (assign_bb
))
1379 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1382 parm
= TREE_OPERAND (expr
, 0);
1383 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1387 cond_bb
= single_pred (assign_bb
);
1388 cond
= last_stmt (cond_bb
);
1390 || gimple_code (cond
) != GIMPLE_COND
1391 || gimple_cond_code (cond
) != NE_EXPR
1392 || gimple_cond_lhs (cond
) != parm
1393 || !integer_zerop (gimple_cond_rhs (cond
)))
1396 phi_bb
= gimple_bb (phi
);
1397 for (i
= 0; i
< 2; i
++)
1399 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1400 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1404 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1405 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
));
1408 /* Inspect the given TYPE and return true iff it has the same structure (the
1409 same number of fields of the same types) as a C++ member pointer. If
1410 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1411 corresponding fields there. */
1414 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1418 if (TREE_CODE (type
) != RECORD_TYPE
)
1421 fld
= TYPE_FIELDS (type
);
1422 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1423 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1424 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1430 fld
= DECL_CHAIN (fld
);
1431 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1432 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1437 if (DECL_CHAIN (fld
))
1443 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1444 return the rhs of its defining statement. Otherwise return RHS as it
1448 get_ssa_def_if_simple_copy (tree rhs
)
1450 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1452 gimple
*def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1454 if (gimple_assign_single_p (def_stmt
))
1455 rhs
= gimple_assign_rhs1 (def_stmt
);
1462 /* Simple linked list, describing known contents of an aggregate before
1465 struct ipa_known_agg_contents_list
1467 /* Offset and size of the described part of the aggregate. */
1468 HOST_WIDE_INT offset
, size
;
1469 /* Known constant value or NULL if the contents is known to be unknown. */
1471 /* Pointer to the next structure in the list. */
1472 struct ipa_known_agg_contents_list
*next
;
1475 /* Add a known content item into a linked list of ipa_known_agg_contents_list
1476 structure, in which all elements are sorted ascendingly by offset. */
1479 add_to_agg_contents_list (struct ipa_known_agg_contents_list
**plist
,
1480 struct ipa_known_agg_contents_list
*item
)
1482 struct ipa_known_agg_contents_list
*list
= *plist
;
1484 for (; list
; list
= list
->next
)
1486 if (list
->offset
>= item
->offset
)
1489 plist
= &list
->next
;
1496 /* Check whether a given known content is clobbered by certain element in
1497 a linked list of ipa_known_agg_contents_list. */
1500 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list
*list
,
1501 struct ipa_known_agg_contents_list
*item
)
1503 for (; list
; list
= list
->next
)
1505 if (list
->offset
>= item
->offset
)
1506 return list
->offset
< item
->offset
+ item
->size
;
1508 if (list
->offset
+ list
->size
> item
->offset
)
1515 /* Build aggregate jump function from LIST, assuming there are exactly
1516 CONST_COUNT constant entries there and that offset of the passed argument
1517 is ARG_OFFSET and store it into JFUNC. */
1520 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1521 int const_count
, HOST_WIDE_INT arg_offset
,
1522 struct ipa_jump_func
*jfunc
)
1524 vec_alloc (jfunc
->agg
.items
, const_count
);
1529 struct ipa_agg_jf_item item
;
1530 item
.offset
= list
->offset
- arg_offset
;
1531 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1532 item
.value
= unshare_expr_without_location (list
->constant
);
1533 jfunc
->agg
.items
->quick_push (item
);
1539 /* If STMT is a memory store to the object whose address is BASE, extract
1540 information (offset, size, and value) into CONTENT, and return true,
1541 otherwise we conservatively assume the whole object is modified with
1542 unknown content, and return false. CHECK_REF means that access to object
1543 is expected to be in form of MEM_REF expression. */
1546 extract_mem_content (gimple
*stmt
, tree base
, bool check_ref
,
1547 struct ipa_known_agg_contents_list
*content
)
1549 HOST_WIDE_INT lhs_offset
, lhs_size
;
1550 tree lhs
, rhs
, lhs_base
;
1553 if (!gimple_assign_single_p (stmt
))
1556 lhs
= gimple_assign_lhs (stmt
);
1557 rhs
= gimple_assign_rhs1 (stmt
);
1559 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1560 || TREE_CODE (lhs
) == BIT_FIELD_REF
1561 || contains_bitfld_component_ref_p (lhs
))
1564 lhs_base
= get_ref_base_and_extent_hwi (lhs
, &lhs_offset
,
1565 &lhs_size
, &reverse
);
1571 if (TREE_CODE (lhs_base
) != MEM_REF
1572 || TREE_OPERAND (lhs_base
, 0) != base
1573 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1576 else if (lhs_base
!= base
)
1579 rhs
= get_ssa_def_if_simple_copy (rhs
);
1581 content
->size
= lhs_size
;
1582 content
->offset
= lhs_offset
;
1583 content
->constant
= is_gimple_ip_invariant (rhs
) ? rhs
: NULL_TREE
;
1584 content
->next
= NULL
;
1589 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1590 in ARG is filled in with constant values. ARG can either be an aggregate
1591 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1592 aggregate. JFUNC is the jump function into which the constants are
1593 subsequently stored. AA_WALK_BUDGET_P points to limit on number of
1594 statements we allow get_continuation_for_phi to examine. */
1597 determine_known_aggregate_parts (gcall
*call
, tree arg
,
1599 struct ipa_jump_func
*jfunc
,
1600 unsigned *aa_walk_budget_p
)
1602 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
1603 bitmap visited
= NULL
;
1604 int item_count
= 0, const_count
= 0;
1605 int ipa_max_agg_items
= param_ipa_max_agg_items
;
1606 HOST_WIDE_INT arg_offset
, arg_size
;
1608 bool check_ref
, by_ref
;
1611 if (ipa_max_agg_items
== 0)
1614 /* The function operates in three stages. First, we prepare check_ref, r,
1615 arg_base and arg_offset based on what is actually passed as an actual
1618 if (POINTER_TYPE_P (arg_type
))
1621 if (TREE_CODE (arg
) == SSA_NAME
)
1624 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
1625 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
1630 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
1631 arg_size
= tree_to_uhwi (type_size
);
1632 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1634 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1638 arg
= TREE_OPERAND (arg
, 0);
1639 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
1640 &arg_size
, &reverse
);
1643 if (DECL_P (arg_base
))
1646 ao_ref_init (&r
, arg_base
);
1658 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1662 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
1663 &arg_size
, &reverse
);
1667 ao_ref_init (&r
, arg
);
1670 /* Second stage traverses virtual SSA web backwards starting from the call
1671 statement, only looks at individual dominating virtual operand (its
1672 definition dominates the call), as long as it is confident that content
1673 of the aggregate is affected by definition of the virtual operand, it
1674 builds a sorted linked list of ipa_agg_jf_list describing that. */
1676 for (tree dom_vuse
= gimple_vuse (call
); dom_vuse
;)
1678 gimple
*stmt
= SSA_NAME_DEF_STMT (dom_vuse
);
1680 if (gimple_code (stmt
) == GIMPLE_PHI
)
1682 dom_vuse
= get_continuation_for_phi (stmt
, &r
, true,
1684 &visited
, false, NULL
, NULL
);
1688 if (stmt_may_clobber_ref_p_1 (stmt
, &r
))
1690 struct ipa_known_agg_contents_list
*content
1691 = XALLOCA (struct ipa_known_agg_contents_list
);
1693 if (!extract_mem_content (stmt
, arg_base
, check_ref
, content
))
1696 /* Now we get a dominating virtual operand, and need to check
1697 whether its value is clobbered any other dominating one. */
1698 if (content
->constant
1699 && !clobber_by_agg_contents_list_p (all_list
, content
))
1701 struct ipa_known_agg_contents_list
*copy
1702 = XALLOCA (struct ipa_known_agg_contents_list
);
1704 /* Add to the list consisting of only dominating virtual
1705 operands, whose definitions can finally reach the call. */
1706 add_to_agg_contents_list (&list
, (*copy
= *content
, copy
));
1708 if (++const_count
== ipa_max_agg_items
)
1712 /* Add to the list consisting of all dominating virtual operands. */
1713 add_to_agg_contents_list (&all_list
, content
);
1715 if (++item_count
== 2 * ipa_max_agg_items
)
1718 dom_vuse
= gimple_vuse (stmt
);
1722 BITMAP_FREE (visited
);
1724 /* Third stage just goes over the list and creates an appropriate vector of
1725 ipa_agg_jf_item structures out of it, of course only if there are
1726 any known constants to begin with. */
1730 jfunc
->agg
.by_ref
= by_ref
;
1731 build_agg_jump_func_from_list (list
, const_count
, arg_offset
, jfunc
);
1736 /* Return the Ith param type of callee associated with call graph
1740 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1743 tree type
= (e
->callee
1744 ? TREE_TYPE (e
->callee
->decl
)
1745 : gimple_call_fntype (e
->call_stmt
));
1746 tree t
= TYPE_ARG_TYPES (type
);
1748 for (n
= 0; n
< i
; n
++)
1755 return TREE_VALUE (t
);
1758 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1759 for (n
= 0; n
< i
; n
++)
1766 return TREE_TYPE (t
);
1770 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1771 allocated structure or a previously existing one shared with other jump
1772 functions and/or transformation summaries. */
1775 ipa_get_ipa_bits_for_value (const widest_int
&value
, const widest_int
&mask
)
1781 ipa_bits
**slot
= ipa_bits_hash_table
->find_slot (&tmp
, INSERT
);
1785 ipa_bits
*res
= ggc_alloc
<ipa_bits
> ();
1793 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1794 table in order to avoid creating multiple same ipa_bits structures. */
1797 ipa_set_jfunc_bits (ipa_jump_func
*jf
, const widest_int
&value
,
1798 const widest_int
&mask
)
1800 jf
->bits
= ipa_get_ipa_bits_for_value (value
, mask
);
1803 /* Return a pointer to a value_range just like *TMP, but either find it in
1804 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1806 static value_range
*
1807 ipa_get_value_range (value_range
*tmp
)
1809 value_range
**slot
= ipa_vr_hash_table
->find_slot (tmp
, INSERT
);
1813 value_range
*vr
= ggc_alloc
<value_range
> ();
1820 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1821 equiv set. Use hash table in order to avoid creating multiple same copies of
1824 static value_range
*
1825 ipa_get_value_range (enum value_range_kind type
, tree min
, tree max
)
1827 value_range
tmp (type
, min
, max
);
1828 return ipa_get_value_range (&tmp
);
1831 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1832 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1833 same value_range structures. */
1836 ipa_set_jfunc_vr (ipa_jump_func
*jf
, enum value_range_kind type
,
1839 jf
->m_vr
= ipa_get_value_range (type
, min
, max
);
1842 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
1843 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1846 ipa_set_jfunc_vr (ipa_jump_func
*jf
, value_range
*tmp
)
1848 jf
->m_vr
= ipa_get_value_range (tmp
);
1851 /* Compute jump function for all arguments of callsite CS and insert the
1852 information in the jump_functions array in the ipa_edge_args corresponding
1853 to this callsite. */
1856 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info
*fbi
,
1857 struct cgraph_edge
*cs
)
1859 class ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1860 class ipa_edge_args
*args
= IPA_EDGE_REF_GET_CREATE (cs
);
1861 gcall
*call
= cs
->call_stmt
;
1862 int n
, arg_num
= gimple_call_num_args (call
);
1863 bool useful_context
= false;
1865 if (arg_num
== 0 || args
->jump_functions
)
1867 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1868 if (flag_devirtualize
)
1869 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
);
1871 if (gimple_call_internal_p (call
))
1873 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1876 for (n
= 0; n
< arg_num
; n
++)
1878 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1879 tree arg
= gimple_call_arg (call
, n
);
1880 tree param_type
= ipa_get_callee_param_type (cs
, n
);
1881 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
1884 class ipa_polymorphic_call_context
context (cs
->caller
->decl
,
1887 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
,
1888 &fbi
->aa_walk_budget
);
1889 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
1890 if (!context
.useless_p ())
1891 useful_context
= true;
1894 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1896 bool addr_nonzero
= false;
1897 bool strict_overflow
= false;
1899 if (TREE_CODE (arg
) == SSA_NAME
1901 && get_ptr_nonnull (arg
))
1902 addr_nonzero
= true;
1903 else if (tree_single_nonzero_warnv_p (arg
, &strict_overflow
))
1904 addr_nonzero
= true;
1908 tree z
= build_int_cst (TREE_TYPE (arg
), 0);
1909 ipa_set_jfunc_vr (jfunc
, VR_ANTI_RANGE
, z
, z
);
1912 gcc_assert (!jfunc
->m_vr
);
1917 value_range_kind type
;
1918 if (TREE_CODE (arg
) == SSA_NAME
1920 && (type
= get_range_info (arg
, &min
, &max
))
1921 && (type
== VR_RANGE
|| type
== VR_ANTI_RANGE
))
1924 value_range
tmpvr (type
,
1925 wide_int_to_tree (TREE_TYPE (arg
), min
),
1926 wide_int_to_tree (TREE_TYPE (arg
), max
));
1927 range_fold_unary_expr (&resvr
, NOP_EXPR
, param_type
,
1928 &tmpvr
, TREE_TYPE (arg
));
1929 if (!resvr
.undefined_p () && !resvr
.varying_p ())
1930 ipa_set_jfunc_vr (jfunc
, &resvr
);
1932 gcc_assert (!jfunc
->m_vr
);
1935 gcc_assert (!jfunc
->m_vr
);
1938 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
))
1939 && (TREE_CODE (arg
) == SSA_NAME
|| TREE_CODE (arg
) == INTEGER_CST
))
1941 if (TREE_CODE (arg
) == SSA_NAME
)
1942 ipa_set_jfunc_bits (jfunc
, 0,
1943 widest_int::from (get_nonzero_bits (arg
),
1944 TYPE_SIGN (TREE_TYPE (arg
))));
1946 ipa_set_jfunc_bits (jfunc
, wi::to_widest (arg
), 0);
1948 else if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1950 unsigned HOST_WIDE_INT bitpos
;
1953 get_pointer_alignment_1 (arg
, &align
, &bitpos
);
1954 widest_int mask
= wi::bit_and_not
1955 (wi::mask
<widest_int
> (TYPE_PRECISION (TREE_TYPE (arg
)), false),
1956 align
/ BITS_PER_UNIT
- 1);
1957 widest_int value
= bitpos
/ BITS_PER_UNIT
;
1958 ipa_set_jfunc_bits (jfunc
, value
, mask
);
1961 gcc_assert (!jfunc
->bits
);
1963 if (is_gimple_ip_invariant (arg
)
1965 && is_global_var (arg
)
1966 && TREE_READONLY (arg
)))
1967 ipa_set_jf_constant (jfunc
, arg
, cs
);
1968 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1969 && TREE_CODE (arg
) == PARM_DECL
)
1971 int index
= ipa_get_param_decl_index (info
, arg
);
1973 gcc_assert (index
>=0);
1974 /* Aggregate passed by value, check for pass-through, otherwise we
1975 will attempt to fill in aggregate contents later in this
1977 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
1979 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
1983 else if (TREE_CODE (arg
) == SSA_NAME
)
1985 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1987 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1991 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
1992 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1997 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
1998 if (is_gimple_assign (stmt
))
1999 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
2000 call
, stmt
, arg
, param_type
);
2001 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2002 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
2004 as_a
<gphi
*> (stmt
));
2008 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2009 passed (because type conversions are ignored in gimple). Usually we can
2010 safely get type from function declaration, but in case of K&R prototypes or
2011 variadic functions we can try our luck with type of the pointer passed.
2012 TODO: Since we look for actual initialization of the memory object, we may better
2013 work out the type based on the memory stores we find. */
2015 param_type
= TREE_TYPE (arg
);
2017 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
2018 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2019 && (jfunc
->type
!= IPA_JF_ANCESTOR
2020 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2021 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
2022 || POINTER_TYPE_P (param_type
)))
2023 determine_known_aggregate_parts (call
, arg
, param_type
, jfunc
,
2024 &fbi
->aa_walk_budget
);
2026 if (!useful_context
)
2027 vec_free (args
->polymorphic_call_contexts
);
2030 /* Compute jump functions for all edges - both direct and indirect - outgoing
2034 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2036 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
2038 struct cgraph_edge
*cs
;
2040 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
2042 struct cgraph_node
*callee
= cs
->callee
;
2046 callee
= callee
->ultimate_alias_target ();
2047 /* We do not need to bother analyzing calls to unknown functions
2048 unless they may become known during lto/whopr. */
2049 if (!callee
->definition
&& !flag_lto
)
2052 ipa_compute_jump_functions_for_edge (fbi
, cs
);
2056 /* If STMT looks like a statement loading a value from a member pointer formal
2057 parameter, return that parameter and store the offset of the field to
2058 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2059 might be clobbered). If USE_DELTA, then we look for a use of the delta
2060 field rather than the pfn. */
2063 ipa_get_stmt_member_ptr_load_param (gimple
*stmt
, bool use_delta
,
2064 HOST_WIDE_INT
*offset_p
)
2066 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
2068 if (!gimple_assign_single_p (stmt
))
2071 rhs
= gimple_assign_rhs1 (stmt
);
2072 if (TREE_CODE (rhs
) == COMPONENT_REF
)
2074 ref_field
= TREE_OPERAND (rhs
, 1);
2075 rhs
= TREE_OPERAND (rhs
, 0);
2078 ref_field
= NULL_TREE
;
2079 if (TREE_CODE (rhs
) != MEM_REF
)
2081 rec
= TREE_OPERAND (rhs
, 0);
2082 if (TREE_CODE (rec
) != ADDR_EXPR
)
2084 rec
= TREE_OPERAND (rec
, 0);
2085 if (TREE_CODE (rec
) != PARM_DECL
2086 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
2088 ref_offset
= TREE_OPERAND (rhs
, 1);
2095 *offset_p
= int_bit_position (fld
);
2099 if (integer_nonzerop (ref_offset
))
2101 return ref_field
== fld
? rec
: NULL_TREE
;
2104 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
2108 /* Returns true iff T is an SSA_NAME defined by a statement. */
2111 ipa_is_ssa_with_stmt_def (tree t
)
2113 if (TREE_CODE (t
) == SSA_NAME
2114 && !SSA_NAME_IS_DEFAULT_DEF (t
))
2120 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2121 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2122 indirect call graph edge.
2123 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2125 static struct cgraph_edge
*
2126 ipa_note_param_call (struct cgraph_node
*node
, int param_index
,
2127 gcall
*stmt
, bool polymorphic
)
2129 struct cgraph_edge
*cs
;
2131 cs
= node
->get_edge (stmt
);
2132 cs
->indirect_info
->param_index
= param_index
;
2133 cs
->indirect_info
->agg_contents
= 0;
2134 cs
->indirect_info
->member_ptr
= 0;
2135 cs
->indirect_info
->guaranteed_unmodified
= 0;
2136 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node
),
2138 if (cs
->indirect_info
->polymorphic
|| polymorphic
)
2139 ipa_set_param_used_by_polymorphic_call
2140 (IPA_NODE_REF (node
), param_index
, true);
2144 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2145 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2146 intermediate information about each formal parameter. Currently it checks
2147 whether the call calls a pointer that is a formal parameter and if so, the
2148 parameter is marked with the called flag and an indirect call graph edge
2149 describing the call is created. This is very simple for ordinary pointers
2150 represented in SSA but not-so-nice when it comes to member pointers. The
2151 ugly part of this function does nothing more than trying to match the
2152 pattern of such a call. An example of such a pattern is the gimple dump
2153 below, the call is on the last line:
2156 f$__delta_5 = f.__delta;
2157 f$__pfn_24 = f.__pfn;
2161 f$__delta_5 = MEM[(struct *)&f];
2162 f$__pfn_24 = MEM[(struct *)&f + 4B];
2164 and a few lines below:
2167 D.2496_3 = (int) f$__pfn_24;
2168 D.2497_4 = D.2496_3 & 1;
2175 D.2500_7 = (unsigned int) f$__delta_5;
2176 D.2501_8 = &S + D.2500_7;
2177 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2178 D.2503_10 = *D.2502_9;
2179 D.2504_12 = f$__pfn_24 + -1;
2180 D.2505_13 = (unsigned int) D.2504_12;
2181 D.2506_14 = D.2503_10 + D.2505_13;
2182 D.2507_15 = *D.2506_14;
2183 iftmp.11_16 = (String:: *) D.2507_15;
2186 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2187 D.2500_19 = (unsigned int) f$__delta_5;
2188 D.2508_20 = &S + D.2500_19;
2189 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2191 Such patterns are results of simple calls to a member pointer:
2193 int doprinting (int (MyString::* f)(int) const)
2195 MyString S ("somestring");
2200 Moreover, the function also looks for called pointers loaded from aggregates
2201 passed by value or reference. */
2204 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2207 class ipa_node_params
*info
= fbi
->info
;
2208 HOST_WIDE_INT offset
;
2211 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2213 tree var
= SSA_NAME_VAR (target
);
2214 int index
= ipa_get_param_decl_index (info
, var
);
2216 ipa_note_param_call (fbi
->node
, index
, call
, false);
2221 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2222 bool guaranteed_unmodified
;
2223 if (gimple_assign_single_p (def
)
2224 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2225 gimple_assign_rhs1 (def
), &index
, &offset
,
2226 NULL
, &by_ref
, &guaranteed_unmodified
))
2228 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2230 cs
->indirect_info
->offset
= offset
;
2231 cs
->indirect_info
->agg_contents
= 1;
2232 cs
->indirect_info
->by_ref
= by_ref
;
2233 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2237 /* Now we need to try to match the complex pattern of calling a member
2239 if (gimple_code (def
) != GIMPLE_PHI
2240 || gimple_phi_num_args (def
) != 2
2241 || !POINTER_TYPE_P (TREE_TYPE (target
))
2242 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2245 /* First, we need to check whether one of these is a load from a member
2246 pointer that is a parameter to this function. */
2247 tree n1
= PHI_ARG_DEF (def
, 0);
2248 tree n2
= PHI_ARG_DEF (def
, 1);
2249 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2251 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2252 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2255 basic_block bb
, virt_bb
;
2256 basic_block join
= gimple_bb (def
);
2257 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2259 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2262 bb
= EDGE_PRED (join
, 0)->src
;
2263 virt_bb
= gimple_bb (d2
);
2265 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2267 bb
= EDGE_PRED (join
, 1)->src
;
2268 virt_bb
= gimple_bb (d1
);
2273 /* Second, we need to check that the basic blocks are laid out in the way
2274 corresponding to the pattern. */
2276 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2277 || single_pred (virt_bb
) != bb
2278 || single_succ (virt_bb
) != join
)
2281 /* Third, let's see that the branching is done depending on the least
2282 significant bit of the pfn. */
2284 gimple
*branch
= last_stmt (bb
);
2285 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2288 if ((gimple_cond_code (branch
) != NE_EXPR
2289 && gimple_cond_code (branch
) != EQ_EXPR
)
2290 || !integer_zerop (gimple_cond_rhs (branch
)))
2293 tree cond
= gimple_cond_lhs (branch
);
2294 if (!ipa_is_ssa_with_stmt_def (cond
))
2297 def
= SSA_NAME_DEF_STMT (cond
);
2298 if (!is_gimple_assign (def
)
2299 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2300 || !integer_onep (gimple_assign_rhs2 (def
)))
2303 cond
= gimple_assign_rhs1 (def
);
2304 if (!ipa_is_ssa_with_stmt_def (cond
))
2307 def
= SSA_NAME_DEF_STMT (cond
);
2309 if (is_gimple_assign (def
)
2310 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2312 cond
= gimple_assign_rhs1 (def
);
2313 if (!ipa_is_ssa_with_stmt_def (cond
))
2315 def
= SSA_NAME_DEF_STMT (cond
);
2319 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2320 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2321 == ptrmemfunc_vbit_in_delta
),
2326 index
= ipa_get_param_decl_index (info
, rec
);
2328 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2330 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2332 cs
->indirect_info
->offset
= offset
;
2333 cs
->indirect_info
->agg_contents
= 1;
2334 cs
->indirect_info
->member_ptr
= 1;
2335 cs
->indirect_info
->guaranteed_unmodified
= 1;
2341 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2342 object referenced in the expression is a formal parameter of the caller
2343 FBI->node (described by FBI->info), create a call note for the
2347 ipa_analyze_virtual_call_uses (struct ipa_func_body_info
*fbi
,
2348 gcall
*call
, tree target
)
2350 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2352 HOST_WIDE_INT anc_offset
;
2354 if (!flag_devirtualize
)
2357 if (TREE_CODE (obj
) != SSA_NAME
)
2360 class ipa_node_params
*info
= fbi
->info
;
2361 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2363 struct ipa_jump_func jfunc
;
2364 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2368 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2369 gcc_assert (index
>= 0);
2370 if (detect_type_change_ssa (fbi
, obj
, obj_type_ref_class (target
),
2376 struct ipa_jump_func jfunc
;
2377 gimple
*stmt
= SSA_NAME_DEF_STMT (obj
);
2380 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2383 index
= ipa_get_param_decl_index (info
,
2384 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2385 gcc_assert (index
>= 0);
2386 if (detect_type_change (fbi
, obj
, expr
, obj_type_ref_class (target
),
2387 call
, &jfunc
, anc_offset
))
2391 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2393 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2394 ii
->offset
= anc_offset
;
2395 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2396 ii
->otr_type
= obj_type_ref_class (target
);
2397 ii
->polymorphic
= 1;
2400 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2401 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2402 containing intermediate information about each formal parameter. */
2405 ipa_analyze_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
)
2407 tree target
= gimple_call_fn (call
);
2410 || (TREE_CODE (target
) != SSA_NAME
2411 && !virtual_method_call_p (target
)))
2414 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2415 /* If we previously turned the call into a direct call, there is
2416 no need to analyze. */
2417 if (cs
&& !cs
->indirect_unknown_callee
)
2420 if (cs
->indirect_info
->polymorphic
&& flag_devirtualize
)
2423 tree target
= gimple_call_fn (call
);
2424 ipa_polymorphic_call_context
context (current_function_decl
,
2425 target
, call
, &instance
);
2427 gcc_checking_assert (cs
->indirect_info
->otr_type
2428 == obj_type_ref_class (target
));
2429 gcc_checking_assert (cs
->indirect_info
->otr_token
2430 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2432 cs
->indirect_info
->vptr_changed
2433 = !context
.get_dynamic_type (instance
,
2434 OBJ_TYPE_REF_OBJECT (target
),
2435 obj_type_ref_class (target
), call
,
2436 &fbi
->aa_walk_budget
);
2437 cs
->indirect_info
->context
= context
;
2440 if (TREE_CODE (target
) == SSA_NAME
)
2441 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2442 else if (virtual_method_call_p (target
))
2443 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2447 /* Analyze the call statement STMT with respect to formal parameters (described
2448 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2449 formal parameters are called. */
2452 ipa_analyze_stmt_uses (struct ipa_func_body_info
*fbi
, gimple
*stmt
)
2454 if (is_gimple_call (stmt
))
2455 ipa_analyze_call_uses (fbi
, as_a
<gcall
*> (stmt
));
2458 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2459 If OP is a parameter declaration, mark it as used in the info structure
2463 visit_ref_for_mod_analysis (gimple
*, tree op
, tree
, void *data
)
2465 class ipa_node_params
*info
= (class ipa_node_params
*) data
;
2467 op
= get_base_address (op
);
2469 && TREE_CODE (op
) == PARM_DECL
)
2471 int index
= ipa_get_param_decl_index (info
, op
);
2472 gcc_assert (index
>= 0);
2473 ipa_set_param_used (info
, index
, true);
2479 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2480 the findings in various structures of the associated ipa_node_params
2481 structure, such as parameter flags, notes etc. FBI holds various data about
2482 the function being analyzed. */
2485 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2487 gimple_stmt_iterator gsi
;
2488 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2490 gimple
*stmt
= gsi_stmt (gsi
);
2492 if (is_gimple_debug (stmt
))
2495 ipa_analyze_stmt_uses (fbi
, stmt
);
2496 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2497 visit_ref_for_mod_analysis
,
2498 visit_ref_for_mod_analysis
,
2499 visit_ref_for_mod_analysis
);
2501 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2502 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2503 visit_ref_for_mod_analysis
,
2504 visit_ref_for_mod_analysis
,
2505 visit_ref_for_mod_analysis
);
2508 /* Calculate controlled uses of parameters of NODE. */
2511 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
2513 class ipa_node_params
*info
= IPA_NODE_REF (node
);
2515 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2517 tree parm
= ipa_get_param (info
, i
);
2518 int controlled_uses
= 0;
2520 /* For SSA regs see if parameter is used. For non-SSA we compute
2521 the flag during modification analysis. */
2522 if (is_gimple_reg (parm
))
2524 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2526 if (ddef
&& !has_zero_uses (ddef
))
2528 imm_use_iterator imm_iter
;
2529 use_operand_p use_p
;
2531 ipa_set_param_used (info
, i
, true);
2532 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2533 if (!is_gimple_call (USE_STMT (use_p
)))
2535 if (!is_gimple_debug (USE_STMT (use_p
)))
2537 controlled_uses
= IPA_UNDESCRIBED_USE
;
2545 controlled_uses
= 0;
2548 controlled_uses
= IPA_UNDESCRIBED_USE
;
2549 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2553 /* Free stuff in BI. */
2556 free_ipa_bb_info (struct ipa_bb_info
*bi
)
2558 bi
->cg_edges
.release ();
2559 bi
->param_aa_statuses
.release ();
2562 /* Dominator walker driving the analysis. */
2564 class analysis_dom_walker
: public dom_walker
2567 analysis_dom_walker (struct ipa_func_body_info
*fbi
)
2568 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
2570 virtual edge
before_dom_children (basic_block
);
2573 struct ipa_func_body_info
*m_fbi
;
2577 analysis_dom_walker::before_dom_children (basic_block bb
)
2579 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
2580 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
2584 /* Release body info FBI. */
2587 ipa_release_body_info (struct ipa_func_body_info
*fbi
)
2590 struct ipa_bb_info
*bi
;
2592 FOR_EACH_VEC_ELT (fbi
->bb_infos
, i
, bi
)
2593 free_ipa_bb_info (bi
);
2594 fbi
->bb_infos
.release ();
2597 /* Initialize the array describing properties of formal parameters
2598 of NODE, analyze their uses and compute jump functions associated
2599 with actual arguments of calls from within NODE. */
2602 ipa_analyze_node (struct cgraph_node
*node
)
2604 struct ipa_func_body_info fbi
;
2605 class ipa_node_params
*info
;
2607 ipa_check_create_node_params ();
2608 ipa_check_create_edge_args ();
2609 info
= IPA_NODE_REF_GET_CREATE (node
);
2611 if (info
->analysis_done
)
2613 info
->analysis_done
= 1;
2615 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2617 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2619 ipa_set_param_used (info
, i
, true);
2620 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2625 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
2627 calculate_dominance_info (CDI_DOMINATORS
);
2628 ipa_initialize_node_params (node
);
2629 ipa_analyze_controlled_uses (node
);
2632 fbi
.info
= IPA_NODE_REF (node
);
2633 fbi
.bb_infos
= vNULL
;
2634 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
2635 fbi
.param_count
= ipa_get_param_count (info
);
2636 fbi
.aa_walk_budget
= param_ipa_max_aa_steps
;
2638 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
2640 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2641 bi
->cg_edges
.safe_push (cs
);
2644 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
2646 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2647 bi
->cg_edges
.safe_push (cs
);
2650 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2652 ipa_release_body_info (&fbi
);
2653 free_dominance_info (CDI_DOMINATORS
);
2657 /* Update the jump functions associated with call graph edge E when the call
2658 graph edge CS is being inlined, assuming that E->caller is already (possibly
2659 indirectly) inlined into CS->callee and that E has not been inlined. */
2662 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2663 struct cgraph_edge
*e
)
2665 class ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2666 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2669 int count
= ipa_get_cs_argument_count (args
);
2672 for (i
= 0; i
< count
; i
++)
2674 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2677 ipa_set_jf_unknown (dst
);
2680 class ipa_polymorphic_call_context
*dst_ctx
2681 = ipa_get_ith_polymorhic_call_context (args
, i
);
2683 if (dst
->type
== IPA_JF_ANCESTOR
)
2685 struct ipa_jump_func
*src
;
2686 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2687 class ipa_polymorphic_call_context
*src_ctx
2688 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2690 /* Variable number of arguments can cause havoc if we try to access
2691 one that does not exist in the inlined edge. So make sure we
2693 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2695 ipa_set_jf_unknown (dst
);
2699 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2701 if (src_ctx
&& !src_ctx
->useless_p ())
2703 class ipa_polymorphic_call_context ctx
= *src_ctx
;
2705 /* TODO: Make type preserved safe WRT contexts. */
2706 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2707 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2708 ctx
.offset_by (dst
->value
.ancestor
.offset
);
2709 if (!ctx
.useless_p ())
2713 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2715 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2718 dst_ctx
->combine_with (ctx
);
2723 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2725 struct ipa_agg_jf_item
*item
;
2728 /* Currently we do not produce clobber aggregate jump functions,
2729 replace with merging when we do. */
2730 gcc_assert (!dst
->agg
.items
);
2732 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2733 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2734 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2735 item
->offset
-= dst
->value
.ancestor
.offset
;
2738 if (src
->type
== IPA_JF_PASS_THROUGH
2739 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2741 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2742 dst
->value
.ancestor
.agg_preserved
&=
2743 src
->value
.pass_through
.agg_preserved
;
2745 else if (src
->type
== IPA_JF_ANCESTOR
)
2747 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2748 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2749 dst
->value
.ancestor
.agg_preserved
&=
2750 src
->value
.ancestor
.agg_preserved
;
2753 ipa_set_jf_unknown (dst
);
2755 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2757 struct ipa_jump_func
*src
;
2758 /* We must check range due to calls with variable number of arguments
2759 and we cannot combine jump functions with operations. */
2760 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2761 && (top
&& dst
->value
.pass_through
.formal_id
2762 < ipa_get_cs_argument_count (top
)))
2764 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2765 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2766 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2767 class ipa_polymorphic_call_context
*src_ctx
2768 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2770 if (src_ctx
&& !src_ctx
->useless_p ())
2772 class ipa_polymorphic_call_context ctx
= *src_ctx
;
2774 /* TODO: Make type preserved safe WRT contexts. */
2775 if (!ipa_get_jf_pass_through_type_preserved (dst
))
2776 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2777 if (!ctx
.useless_p ())
2781 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2783 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2785 dst_ctx
->combine_with (ctx
);
2790 case IPA_JF_UNKNOWN
:
2791 ipa_set_jf_unknown (dst
);
2794 ipa_set_jf_cst_copy (dst
, src
);
2797 case IPA_JF_PASS_THROUGH
:
2799 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2800 enum tree_code operation
;
2801 operation
= ipa_get_jf_pass_through_operation (src
);
2803 if (operation
== NOP_EXPR
)
2807 && ipa_get_jf_pass_through_agg_preserved (src
);
2808 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
2810 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
2811 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
2814 tree operand
= ipa_get_jf_pass_through_operand (src
);
2815 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2820 case IPA_JF_ANCESTOR
:
2824 && ipa_get_jf_ancestor_agg_preserved (src
);
2825 ipa_set_ancestor_jf (dst
,
2826 ipa_get_jf_ancestor_offset (src
),
2827 ipa_get_jf_ancestor_formal_id (src
),
2836 && (dst_agg_p
|| !src
->agg
.by_ref
))
2838 /* Currently we do not produce clobber aggregate jump
2839 functions, replace with merging when we do. */
2840 gcc_assert (!dst
->agg
.items
);
2842 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2843 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2847 ipa_set_jf_unknown (dst
);
2852 /* If TARGET is an addr_expr of a function declaration, make it the
2853 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2854 Otherwise, return NULL. */
2856 struct cgraph_edge
*
2857 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
2860 struct cgraph_node
*callee
;
2861 bool unreachable
= false;
2863 if (TREE_CODE (target
) == ADDR_EXPR
)
2864 target
= TREE_OPERAND (target
, 0);
2865 if (TREE_CODE (target
) != FUNCTION_DECL
)
2867 target
= canonicalize_constructor_val (target
, NULL
);
2868 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2870 /* Member pointer call that goes through a VMT lookup. */
2871 if (ie
->indirect_info
->member_ptr
2872 /* Or if target is not an invariant expression and we do not
2873 know if it will evaulate to function at runtime.
2874 This can happen when folding through &VAR, where &VAR
2875 is IP invariant, but VAR itself is not.
2877 TODO: Revisit this when GCC 5 is branched. It seems that
2878 member_ptr check is not needed and that we may try to fold
2879 the expression and see if VAR is readonly. */
2880 || !is_gimple_ip_invariant (target
))
2882 if (dump_enabled_p ())
2884 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2885 "discovered direct call non-invariant %s\n",
2886 ie
->caller
->dump_name ());
2892 if (dump_enabled_p ())
2894 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2895 "discovered direct call to non-function in %s, "
2896 "making it __builtin_unreachable\n",
2897 ie
->caller
->dump_name ());
2900 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2901 callee
= cgraph_node::get_create (target
);
2905 callee
= cgraph_node::get (target
);
2908 callee
= cgraph_node::get (target
);
2910 /* Because may-edges are not explicitely represented and vtable may be external,
2911 we may create the first reference to the object in the unit. */
2912 if (!callee
|| callee
->inlined_to
)
2915 /* We are better to ensure we can refer to it.
2916 In the case of static functions we are out of luck, since we already
2917 removed its body. In the case of public functions we may or may
2918 not introduce the reference. */
2919 if (!canonicalize_constructor_val (target
, NULL
)
2920 || !TREE_PUBLIC (target
))
2923 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2924 "(%s -> %s) but cannot refer to it. Giving up.\n",
2925 ie
->caller
->dump_name (),
2926 ie
->callee
->dump_name ());
2929 callee
= cgraph_node::get_create (target
);
2932 /* If the edge is already speculated. */
2933 if (speculative
&& ie
->speculative
)
2935 struct cgraph_edge
*e2
;
2936 struct ipa_ref
*ref
;
2937 ie
->speculative_call_info (e2
, ie
, ref
);
2938 if (e2
->callee
->ultimate_alias_target ()
2939 != callee
->ultimate_alias_target ())
2942 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative "
2943 "target (%s -> %s) but the call is already "
2944 "speculated to %s. Giving up.\n",
2945 ie
->caller
->dump_name (), callee
->dump_name (),
2946 e2
->callee
->dump_name ());
2951 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2952 "(%s -> %s) this agree with previous speculation.\n",
2953 ie
->caller
->dump_name (), callee
->dump_name ());
2958 if (!dbg_cnt (devirt
))
2961 ipa_check_create_node_params ();
2963 /* We cannot make edges to inline clones. It is bug that someone removed
2964 the cgraph node too early. */
2965 gcc_assert (!callee
->inlined_to
);
2967 if (dump_file
&& !unreachable
)
2969 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
2970 "(%s -> %s), for stmt ",
2971 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2972 speculative
? "speculative" : "known",
2973 ie
->caller
->dump_name (),
2974 callee
->dump_name ());
2976 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2978 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2980 if (dump_enabled_p ())
2982 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2983 "converting indirect call in %s to direct call to %s\n",
2984 ie
->caller
->name (), callee
->name ());
2988 struct cgraph_edge
*orig
= ie
;
2989 ie
= ie
->make_direct (callee
);
2990 /* If we resolved speculative edge the cost is already up to date
2991 for direct call (adjusted by inline_edge_duplication_hook). */
2994 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
2995 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2996 - eni_size_weights
.call_cost
);
2997 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2998 - eni_time_weights
.call_cost
);
3003 if (!callee
->can_be_discarded_p ())
3006 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
3010 /* make_speculative will update ie's cost to direct call cost. */
3011 ie
= ie
->make_speculative
3012 (callee
, ie
->count
.apply_scale (8, 10));
3018 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3019 CONSTRUCTOR and return it. Return NULL if the search fails for some
3023 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3025 tree type
= TREE_TYPE (constructor
);
3026 if (TREE_CODE (type
) != ARRAY_TYPE
3027 && TREE_CODE (type
) != RECORD_TYPE
)
3032 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3034 HOST_WIDE_INT elt_offset
;
3035 if (TREE_CODE (type
) == ARRAY_TYPE
)
3038 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3039 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3043 if (TREE_CODE (index
) == RANGE_EXPR
)
3044 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3046 off
= wi::to_offset (index
);
3047 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3049 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3050 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3051 off
= wi::sext (off
- wi::to_offset (low_bound
),
3052 TYPE_PRECISION (TREE_TYPE (index
)));
3054 off
*= wi::to_offset (unit_size
);
3055 /* ??? Handle more than just the first index of a
3059 off
= wi::to_offset (unit_size
) * ix
;
3061 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3062 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3064 elt_offset
= off
.to_shwi ();
3066 else if (TREE_CODE (type
) == RECORD_TYPE
)
3068 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3069 if (DECL_BIT_FIELD (index
))
3071 elt_offset
= int_bit_position (index
);
3076 if (elt_offset
> req_offset
)
3079 if (TREE_CODE (val
) == CONSTRUCTOR
)
3080 return find_constructor_constant_at_offset (val
,
3081 req_offset
- elt_offset
);
3083 if (elt_offset
== req_offset
3084 && is_gimple_reg_type (TREE_TYPE (val
))
3085 && is_gimple_ip_invariant (val
))
3091 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3092 invariant from a static constructor and if so, return it. Otherwise return
3096 ipa_find_agg_cst_from_init (tree scalar
, HOST_WIDE_INT offset
, bool by_ref
)
3100 if (TREE_CODE (scalar
) != ADDR_EXPR
)
3102 scalar
= TREE_OPERAND (scalar
, 0);
3106 || !is_global_var (scalar
)
3107 || !TREE_READONLY (scalar
)
3108 || !DECL_INITIAL (scalar
)
3109 || TREE_CODE (DECL_INITIAL (scalar
)) != CONSTRUCTOR
)
3112 return find_constructor_constant_at_offset (DECL_INITIAL (scalar
), offset
);
3115 /* Retrieve value from aggregate jump function AGG or static initializer of
3116 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3117 none. BY_REF specifies whether the value has to be passed by reference or
3118 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3119 to is set to true if the value comes from an initializer of a constant. */
3122 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
, tree scalar
,
3123 HOST_WIDE_INT offset
, bool by_ref
,
3124 bool *from_global_constant
)
3126 struct ipa_agg_jf_item
*item
;
3131 tree res
= ipa_find_agg_cst_from_init (scalar
, offset
, by_ref
);
3134 if (from_global_constant
)
3135 *from_global_constant
= true;
3141 || by_ref
!= agg
->by_ref
)
3144 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
3145 if (item
->offset
== offset
)
3147 /* Currently we do not have clobber values, return NULL for them once
3149 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
3150 if (from_global_constant
)
3151 *from_global_constant
= false;
3157 /* Remove a reference to SYMBOL from the list of references of a node given by
3158 reference description RDESC. Return true if the reference has been
3159 successfully found and removed. */
3162 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
3164 struct ipa_ref
*to_del
;
3165 struct cgraph_edge
*origin
;
3170 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
3171 origin
->lto_stmt_uid
);
3175 to_del
->remove_reference ();
3177 fprintf (dump_file
, "ipa-prop: Removed a reference from %s to %s.\n",
3178 origin
->caller
->dump_name (), xstrdup_for_dump (symbol
->name ()));
3182 /* If JFUNC has a reference description with refcount different from
3183 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3184 NULL. JFUNC must be a constant jump function. */
3186 static struct ipa_cst_ref_desc
*
3187 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
3189 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
3190 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
3196 /* If the value of constant jump function JFUNC is an address of a function
3197 declaration, return the associated call graph node. Otherwise return
3200 static cgraph_node
*
3201 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
3203 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
3204 tree cst
= ipa_get_jf_constant (jfunc
);
3205 if (TREE_CODE (cst
) != ADDR_EXPR
3206 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
3209 return cgraph_node::get (TREE_OPERAND (cst
, 0));
3213 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3214 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3215 the edge specified in the rdesc. Return false if either the symbol or the
3216 reference could not be found, otherwise return true. */
3219 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
3221 struct ipa_cst_ref_desc
*rdesc
;
3222 if (jfunc
->type
== IPA_JF_CONST
3223 && (rdesc
= jfunc_rdesc_usable (jfunc
))
3224 && --rdesc
->refcount
== 0)
3226 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
3230 return remove_described_reference (symbol
, rdesc
);
3235 /* Try to find a destination for indirect edge IE that corresponds to a simple
3236 call or a call of a member function pointer and where the destination is a
3237 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3238 the type of the parameter to which the result of JFUNC is passed. If it can
3239 be determined, return the newly direct edge, otherwise return NULL.
3240 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3242 static struct cgraph_edge
*
3243 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
3244 struct ipa_jump_func
*jfunc
, tree target_type
,
3245 class ipa_node_params
*new_root_info
)
3247 struct cgraph_edge
*cs
;
3249 bool agg_contents
= ie
->indirect_info
->agg_contents
;
3250 tree scalar
= ipa_value_from_jfunc (new_root_info
, jfunc
, target_type
);
3253 bool from_global_constant
;
3254 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
, scalar
,
3255 ie
->indirect_info
->offset
,
3256 ie
->indirect_info
->by_ref
,
3257 &from_global_constant
);
3259 && !from_global_constant
3260 && !ie
->indirect_info
->guaranteed_unmodified
)
3267 cs
= ipa_make_edge_direct_to_target (ie
, target
);
3269 if (cs
&& !agg_contents
)
3272 gcc_checking_assert (cs
->callee
3274 || jfunc
->type
!= IPA_JF_CONST
3275 || !cgraph_node_for_jfunc (jfunc
)
3276 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
3277 ok
= try_decrement_rdesc_refcount (jfunc
);
3278 gcc_checking_assert (ok
);
3284 /* Return the target to be used in cases of impossible devirtualization. IE
3285 and target (the latter can be NULL) are dumped when dumping is enabled. */
3288 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
3294 "Type inconsistent devirtualization: %s->%s\n",
3295 ie
->caller
->dump_name (),
3296 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
3299 "No devirtualization target in %s\n",
3300 ie
->caller
->dump_name ());
3302 tree new_target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3303 cgraph_node::get_create (new_target
);
3307 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3308 call based on a formal parameter which is described by jump function JFUNC
3309 and if it can be determined, make it direct and return the direct edge.
3310 Otherwise, return NULL. CTX describes the polymorphic context that the
3311 parameter the call is based on brings along with it. */
3313 static struct cgraph_edge
*
3314 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3315 struct ipa_jump_func
*jfunc
,
3316 class ipa_polymorphic_call_context ctx
)
3319 bool speculative
= false;
3321 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3324 gcc_assert (!ie
->indirect_info
->by_ref
);
3326 /* Try to do lookup via known virtual table pointer value. */
3327 if (!ie
->indirect_info
->vptr_changed
3328 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3331 unsigned HOST_WIDE_INT offset
;
3332 tree scalar
= (jfunc
->type
== IPA_JF_CONST
) ? ipa_get_jf_constant (jfunc
)
3334 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
, scalar
,
3335 ie
->indirect_info
->offset
,
3337 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3340 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3341 vtable
, offset
, &can_refer
);
3345 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
)
3346 || !possible_polymorphic_call_target_p
3347 (ie
, cgraph_node::get (t
)))
3349 /* Do not speculate builtin_unreachable, it is stupid! */
3350 if (!ie
->indirect_info
->vptr_changed
)
3351 target
= ipa_impossible_devirt_target (ie
, target
);
3358 speculative
= ie
->indirect_info
->vptr_changed
;
3364 ipa_polymorphic_call_context
ie_context (ie
);
3365 vec
<cgraph_node
*>targets
;
3368 ctx
.offset_by (ie
->indirect_info
->offset
);
3369 if (ie
->indirect_info
->vptr_changed
)
3370 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3371 ie
->indirect_info
->otr_type
);
3372 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3373 targets
= possible_polymorphic_call_targets
3374 (ie
->indirect_info
->otr_type
,
3375 ie
->indirect_info
->otr_token
,
3377 if (final
&& targets
.length () <= 1)
3379 speculative
= false;
3380 if (targets
.length () == 1)
3381 target
= targets
[0]->decl
;
3383 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3385 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3386 && !ie
->speculative
&& ie
->maybe_hot_p ())
3389 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3390 ie
->indirect_info
->otr_token
,
3391 ie
->indirect_info
->context
);
3401 if (!possible_polymorphic_call_target_p
3402 (ie
, cgraph_node::get_create (target
)))
3406 target
= ipa_impossible_devirt_target (ie
, target
);
3408 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
3414 /* Update the param called notes associated with NODE when CS is being inlined,
3415 assuming NODE is (potentially indirectly) inlined into CS->callee.
3416 Moreover, if the callee is discovered to be constant, create a new cgraph
3417 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3418 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3421 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3422 struct cgraph_node
*node
,
3423 vec
<cgraph_edge
*> *new_edges
)
3425 class ipa_edge_args
*top
;
3426 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3427 class ipa_node_params
*new_root_info
, *inlined_node_info
;
3430 ipa_check_create_edge_args ();
3431 top
= IPA_EDGE_REF (cs
);
3432 new_root_info
= IPA_NODE_REF (cs
->caller
->inlined_to
3433 ? cs
->caller
->inlined_to
3435 inlined_node_info
= IPA_NODE_REF (cs
->callee
->function_symbol ());
3437 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3439 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3440 struct ipa_jump_func
*jfunc
;
3442 cgraph_node
*spec_target
= NULL
;
3444 next_ie
= ie
->next_callee
;
3446 if (ici
->param_index
== -1)
3449 /* We must check range due to calls with variable number of arguments: */
3450 if (!top
|| ici
->param_index
>= ipa_get_cs_argument_count (top
))
3452 ici
->param_index
= -1;
3456 param_index
= ici
->param_index
;
3457 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3459 if (ie
->speculative
)
3461 struct cgraph_edge
*de
;
3462 struct ipa_ref
*ref
;
3463 ie
->speculative_call_info (de
, ie
, ref
);
3464 spec_target
= de
->callee
;
3467 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3468 new_direct_edge
= NULL
;
3469 else if (ici
->polymorphic
)
3471 ipa_polymorphic_call_context ctx
;
3472 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3473 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
);
3477 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
3478 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3483 /* If speculation was removed, then we need to do nothing. */
3484 if (new_direct_edge
&& new_direct_edge
!= ie
3485 && new_direct_edge
->callee
== spec_target
)
3487 new_direct_edge
->indirect_inlining_edge
= 1;
3488 top
= IPA_EDGE_REF (cs
);
3490 if (!new_direct_edge
->speculative
)
3493 else if (new_direct_edge
)
3495 new_direct_edge
->indirect_inlining_edge
= 1;
3498 new_edges
->safe_push (new_direct_edge
);
3501 top
= IPA_EDGE_REF (cs
);
3502 /* If speculative edge was introduced we still need to update
3503 call info of the indirect edge. */
3504 if (!new_direct_edge
->speculative
)
3507 if (jfunc
->type
== IPA_JF_PASS_THROUGH
3508 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3510 if (ici
->agg_contents
3511 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
3512 && !ici
->polymorphic
)
3513 ici
->param_index
= -1;
3516 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3517 if (ici
->polymorphic
3518 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
3519 ici
->vptr_changed
= true;
3520 ipa_set_param_used_by_indirect_call (new_root_info
,
3521 ici
->param_index
, true);
3522 if (ici
->polymorphic
)
3523 ipa_set_param_used_by_polymorphic_call (new_root_info
,
3524 ici
->param_index
, true);
3527 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3529 if (ici
->agg_contents
3530 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
3531 && !ici
->polymorphic
)
3532 ici
->param_index
= -1;
3535 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3536 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3537 if (ici
->polymorphic
3538 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
3539 ici
->vptr_changed
= true;
3543 /* Either we can find a destination for this edge now or never. */
3544 ici
->param_index
= -1;
3550 /* Recursively traverse subtree of NODE (including node) made of inlined
3551 cgraph_edges when CS has been inlined and invoke
3552 update_indirect_edges_after_inlining on all nodes and
3553 update_jump_functions_after_inlining on all non-inlined edges that lead out
3554 of this subtree. Newly discovered indirect edges will be added to
3555 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3559 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
3560 struct cgraph_node
*node
,
3561 vec
<cgraph_edge
*> *new_edges
)
3563 struct cgraph_edge
*e
;
3566 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
3568 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3569 if (!e
->inline_failed
)
3570 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
3572 update_jump_functions_after_inlining (cs
, e
);
3573 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3574 update_jump_functions_after_inlining (cs
, e
);
3579 /* Combine two controlled uses counts as done during inlining. */
3582 combine_controlled_uses_counters (int c
, int d
)
3584 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
3585 return IPA_UNDESCRIBED_USE
;
3590 /* Propagate number of controlled users from CS->callee to the new root of the
3591 tree of inlined nodes. */
3594 propagate_controlled_uses (struct cgraph_edge
*cs
)
3596 class ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3599 struct cgraph_node
*new_root
= cs
->caller
->inlined_to
3600 ? cs
->caller
->inlined_to
: cs
->caller
;
3601 class ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
3602 class ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
3608 count
= MIN (ipa_get_cs_argument_count (args
),
3609 ipa_get_param_count (old_root_info
));
3610 for (i
= 0; i
< count
; i
++)
3612 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3613 struct ipa_cst_ref_desc
*rdesc
;
3615 if (jf
->type
== IPA_JF_PASS_THROUGH
)
3618 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
3619 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
3620 d
= ipa_get_controlled_uses (old_root_info
, i
);
3622 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
3623 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
3624 c
= combine_controlled_uses_counters (c
, d
);
3625 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
3626 if (c
== 0 && new_root_info
->ipcp_orig_node
)
3628 struct cgraph_node
*n
;
3629 struct ipa_ref
*ref
;
3630 tree t
= new_root_info
->known_csts
[src_idx
];
3632 if (t
&& TREE_CODE (t
) == ADDR_EXPR
3633 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
3634 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
3635 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
3638 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
3639 "reference from %s to %s.\n",
3640 new_root
->dump_name (),
3642 ref
->remove_reference ();
3646 else if (jf
->type
== IPA_JF_CONST
3647 && (rdesc
= jfunc_rdesc_usable (jf
)))
3649 int d
= ipa_get_controlled_uses (old_root_info
, i
);
3650 int c
= rdesc
->refcount
;
3651 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
3652 if (rdesc
->refcount
== 0)
3654 tree cst
= ipa_get_jf_constant (jf
);
3655 struct cgraph_node
*n
;
3656 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
3657 && TREE_CODE (TREE_OPERAND (cst
, 0))
3659 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
3662 struct cgraph_node
*clone
;
3664 ok
= remove_described_reference (n
, rdesc
);
3665 gcc_checking_assert (ok
);
3668 while (clone
->inlined_to
3669 && clone
->ipcp_clone
3670 && clone
!= rdesc
->cs
->caller
)
3672 struct ipa_ref
*ref
;
3673 ref
= clone
->find_reference (n
, NULL
, 0);
3677 fprintf (dump_file
, "ipa-prop: Removing "
3678 "cloning-created reference "
3680 clone
->dump_name (),
3682 ref
->remove_reference ();
3684 clone
= clone
->callers
->caller
;
3691 for (i
= ipa_get_param_count (old_root_info
);
3692 i
< ipa_get_cs_argument_count (args
);
3695 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3697 if (jf
->type
== IPA_JF_CONST
)
3699 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3701 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3703 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3704 ipa_set_controlled_uses (new_root_info
,
3705 jf
->value
.pass_through
.formal_id
,
3706 IPA_UNDESCRIBED_USE
);
3710 /* Update jump functions and call note functions on inlining the call site CS.
3711 CS is expected to lead to a node already cloned by
3712 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3713 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3717 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
3718 vec
<cgraph_edge
*> *new_edges
)
3721 /* Do nothing if the preparation phase has not been carried out yet
3722 (i.e. during early inlining). */
3723 if (!ipa_node_params_sum
)
3725 gcc_assert (ipa_edge_args_sum
);
3727 propagate_controlled_uses (cs
);
3728 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
3729 ipa_node_params_sum
->remove (cs
->callee
);
3731 class ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3735 if (args
->jump_functions
)
3737 struct ipa_jump_func
*jf
;
3739 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3740 if (jf
->type
== IPA_JF_CONST
3741 && ipa_get_jf_constant_rdesc (jf
))
3748 ipa_edge_args_sum
->remove (cs
);
3750 if (ipcp_transformation_sum
)
3751 ipcp_transformation_sum
->remove (cs
->callee
);
3756 /* Ensure that array of edge arguments infos is big enough to accommodate a
3757 structure for all edges and reallocates it if not. Also, allocate
3758 associated hash tables if they do not already exist. */
3761 ipa_check_create_edge_args (void)
3763 if (!ipa_edge_args_sum
)
3765 = (new (ggc_alloc_no_dtor
<ipa_edge_args_sum_t
> ())
3766 ipa_edge_args_sum_t (symtab
, true));
3767 if (!ipa_bits_hash_table
)
3768 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
3769 if (!ipa_vr_hash_table
)
3770 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
3773 /* Free all ipa_edge structures. */
3776 ipa_free_all_edge_args (void)
3778 if (!ipa_edge_args_sum
)
3781 ggc_delete (ipa_edge_args_sum
);
3782 ipa_edge_args_sum
= NULL
;
3785 /* Free all ipa_node_params structures. */
3788 ipa_free_all_node_params (void)
3790 ggc_delete (ipa_node_params_sum
);
3791 ipa_node_params_sum
= NULL
;
3794 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3795 tables if they do not already exist. */
3798 ipcp_transformation_initialize (void)
3800 if (!ipa_bits_hash_table
)
3801 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
3802 if (!ipa_vr_hash_table
)
3803 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
3804 if (ipcp_transformation_sum
== NULL
)
3805 ipcp_transformation_sum
= ipcp_transformation_t::create_ggc (symtab
);
3808 /* Release the IPA CP transformation summary. */
3811 ipcp_free_transformation_sum (void)
3813 if (!ipcp_transformation_sum
)
3816 ipcp_transformation_sum
->~function_summary
<ipcp_transformation
*> ();
3817 ggc_free (ipcp_transformation_sum
);
3818 ipcp_transformation_sum
= NULL
;
3821 /* Set the aggregate replacements of NODE to be AGGVALS. */
3824 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3825 struct ipa_agg_replacement_value
*aggvals
)
3827 ipcp_transformation_initialize ();
3828 ipcp_transformation
*s
= ipcp_transformation_sum
->get_create (node
);
3829 s
->agg_values
= aggvals
;
3832 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3833 count data structures accordingly. */
3836 ipa_edge_args_sum_t::remove (cgraph_edge
*cs
, ipa_edge_args
*args
)
3838 if (args
->jump_functions
)
3840 struct ipa_jump_func
*jf
;
3842 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3844 struct ipa_cst_ref_desc
*rdesc
;
3845 try_decrement_rdesc_refcount (jf
);
3846 if (jf
->type
== IPA_JF_CONST
3847 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3854 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3855 reference count data structures accordingly. */
3858 ipa_edge_args_sum_t::duplicate (cgraph_edge
*src
, cgraph_edge
*dst
,
3859 ipa_edge_args
*old_args
, ipa_edge_args
*new_args
)
3863 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3864 if (old_args
->polymorphic_call_contexts
)
3865 new_args
->polymorphic_call_contexts
3866 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
3868 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3870 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3871 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3873 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3875 if (src_jf
->type
== IPA_JF_CONST
)
3877 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3880 dst_jf
->value
.constant
.rdesc
= NULL
;
3881 else if (src
->caller
== dst
->caller
)
3883 struct ipa_ref
*ref
;
3884 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3885 gcc_checking_assert (n
);
3886 ref
= src
->caller
->find_reference (n
, src
->call_stmt
,
3888 gcc_checking_assert (ref
);
3889 dst
->caller
->clone_reference (ref
, ref
->stmt
);
3891 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3892 dst_rdesc
->cs
= dst
;
3893 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3894 dst_rdesc
->next_duplicate
= NULL
;
3895 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3897 else if (src_rdesc
->cs
== src
)
3899 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3900 dst_rdesc
->cs
= dst
;
3901 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3902 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3903 src_rdesc
->next_duplicate
= dst_rdesc
;
3904 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3908 struct ipa_cst_ref_desc
*dst_rdesc
;
3909 /* This can happen during inlining, when a JFUNC can refer to a
3910 reference taken in a function up in the tree of inline clones.
3911 We need to find the duplicate that refers to our tree of
3914 gcc_assert (dst
->caller
->inlined_to
);
3915 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3917 dst_rdesc
= dst_rdesc
->next_duplicate
)
3919 struct cgraph_node
*top
;
3920 top
= dst_rdesc
->cs
->caller
->inlined_to
3921 ? dst_rdesc
->cs
->caller
->inlined_to
3922 : dst_rdesc
->cs
->caller
;
3923 if (dst
->caller
->inlined_to
== top
)
3926 gcc_assert (dst_rdesc
);
3927 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3930 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
3931 && src
->caller
== dst
->caller
)
3933 struct cgraph_node
*inline_root
= dst
->caller
->inlined_to
3934 ? dst
->caller
->inlined_to
: dst
->caller
;
3935 class ipa_node_params
*root_info
= IPA_NODE_REF (inline_root
);
3936 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
3938 int c
= ipa_get_controlled_uses (root_info
, idx
);
3939 if (c
!= IPA_UNDESCRIBED_USE
)
3942 ipa_set_controlled_uses (root_info
, idx
, c
);
3948 /* Analyze newly added function into callgraph. */
3951 ipa_add_new_function (cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3953 if (node
->has_gimple_body_p ())
3954 ipa_analyze_node (node
);
3957 /* Hook that is called by summary when a node is duplicated. */
3960 ipa_node_params_t::duplicate(cgraph_node
*src
, cgraph_node
*dst
,
3961 ipa_node_params
*old_info
,
3962 ipa_node_params
*new_info
)
3964 ipa_agg_replacement_value
*old_av
, *new_av
;
3966 new_info
->descriptors
= vec_safe_copy (old_info
->descriptors
);
3967 new_info
->lattices
= NULL
;
3968 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3969 new_info
->known_csts
= old_info
->known_csts
.copy ();
3970 new_info
->known_contexts
= old_info
->known_contexts
.copy ();
3972 new_info
->analysis_done
= old_info
->analysis_done
;
3973 new_info
->node_enqueued
= old_info
->node_enqueued
;
3974 new_info
->versionable
= old_info
->versionable
;
3976 old_av
= ipa_get_agg_replacements_for_node (src
);
3982 struct ipa_agg_replacement_value
*v
;
3984 v
= ggc_alloc
<ipa_agg_replacement_value
> ();
3985 memcpy (v
, old_av
, sizeof (*v
));
3988 old_av
= old_av
->next
;
3990 ipa_set_node_agg_value_chain (dst
, new_av
);
3994 /* Duplication of ipcp transformation summaries. */
3997 ipcp_transformation_t::duplicate(cgraph_node
*, cgraph_node
*dst
,
3998 ipcp_transformation
*src_trans
,
3999 ipcp_transformation
*dst_trans
)
4001 /* Avoid redundant work of duplicating vectors we will never use. */
4002 if (dst
->inlined_to
)
4004 dst_trans
->bits
= vec_safe_copy (src_trans
->bits
);
4005 dst_trans
->m_vr
= vec_safe_copy (src_trans
->m_vr
);
4006 ipa_agg_replacement_value
*agg
= src_trans
->agg_values
,
4007 **aggptr
= &dst_trans
->agg_values
;
4010 *aggptr
= ggc_alloc
<ipa_agg_replacement_value
> ();
4013 aggptr
= &(*aggptr
)->next
;
4017 /* Register our cgraph hooks if they are not already there. */
4020 ipa_register_cgraph_hooks (void)
4022 ipa_check_create_node_params ();
4023 ipa_check_create_edge_args ();
4025 function_insertion_hook_holder
=
4026 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
4029 /* Unregister our cgraph hooks if they are not already there. */
4032 ipa_unregister_cgraph_hooks (void)
4034 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
4035 function_insertion_hook_holder
= NULL
;
4038 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4039 longer needed after ipa-cp. */
4042 ipa_free_all_structures_after_ipa_cp (void)
4044 if (!optimize
&& !in_lto_p
)
4046 ipa_free_all_edge_args ();
4047 ipa_free_all_node_params ();
4048 ipcp_sources_pool
.release ();
4049 ipcp_cst_values_pool
.release ();
4050 ipcp_poly_ctx_values_pool
.release ();
4051 ipcp_agg_lattice_pool
.release ();
4052 ipa_unregister_cgraph_hooks ();
4053 ipa_refdesc_pool
.release ();
4057 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4058 longer needed after indirect inlining. */
4061 ipa_free_all_structures_after_iinln (void)
4063 ipa_free_all_edge_args ();
4064 ipa_free_all_node_params ();
4065 ipa_unregister_cgraph_hooks ();
4066 ipcp_sources_pool
.release ();
4067 ipcp_cst_values_pool
.release ();
4068 ipcp_poly_ctx_values_pool
.release ();
4069 ipcp_agg_lattice_pool
.release ();
4070 ipa_refdesc_pool
.release ();
4073 /* Print ipa_tree_map data structures of all functions in the
4077 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
4080 class ipa_node_params
*info
;
4082 if (!node
->definition
)
4084 info
= IPA_NODE_REF (node
);
4085 fprintf (f
, " function %s parameter descriptors:\n", node
->dump_name ());
4088 fprintf (f
, " no params return\n");
4091 count
= ipa_get_param_count (info
);
4092 for (i
= 0; i
< count
; i
++)
4097 ipa_dump_param (f
, info
, i
);
4098 if (ipa_is_param_used (info
, i
))
4099 fprintf (f
, " used");
4100 if (ipa_is_param_used_by_ipa_predicates (info
, i
))
4101 fprintf (f
, " used_by_ipa_predicates");
4102 if (ipa_is_param_used_by_indirect_call (info
, i
))
4103 fprintf (f
, " used_by_indirect_call");
4104 if (ipa_is_param_used_by_polymorphic_call (info
, i
))
4105 fprintf (f
, " used_by_polymorphic_call");
4106 c
= ipa_get_controlled_uses (info
, i
);
4107 if (c
== IPA_UNDESCRIBED_USE
)
4108 fprintf (f
, " undescribed_use");
4110 fprintf (f
, " controlled_uses=%i", c
);
4115 /* Print ipa_tree_map data structures of all functions in the
4119 ipa_print_all_params (FILE * f
)
4121 struct cgraph_node
*node
;
4123 fprintf (f
, "\nFunction parameters:\n");
4124 FOR_EACH_FUNCTION (node
)
4125 ipa_print_node_params (f
, node
);
4128 /* Dump the AV linked list. */
4131 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4134 fprintf (f
, " Aggregate replacements:");
4135 for (; av
; av
= av
->next
)
4137 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4138 av
->index
, av
->offset
);
4139 print_generic_expr (f
, av
->value
);
4145 /* Stream out jump function JUMP_FUNC to OB. */
4148 ipa_write_jump_function (struct output_block
*ob
,
4149 struct ipa_jump_func
*jump_func
)
4151 struct ipa_agg_jf_item
*item
;
4152 struct bitpack_d bp
;
4156 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4157 as well as WPA memory by handling them specially. */
4158 if (jump_func
->type
== IPA_JF_CONST
4159 && TREE_CODE (jump_func
->value
.constant
.value
) == ADDR_EXPR
)
4162 streamer_write_uhwi (ob
, jump_func
->type
* 2 + flag
);
4163 switch (jump_func
->type
)
4165 case IPA_JF_UNKNOWN
:
4169 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4170 stream_write_tree (ob
,
4172 ? TREE_OPERAND (jump_func
->value
.constant
.value
, 0)
4173 : jump_func
->value
.constant
.value
, true);
4175 case IPA_JF_PASS_THROUGH
:
4176 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4177 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4179 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4180 bp
= bitpack_create (ob
->main_stream
);
4181 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4182 streamer_write_bitpack (&bp
);
4184 else if (TREE_CODE_CLASS (jump_func
->value
.pass_through
.operation
)
4186 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4189 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4190 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4193 case IPA_JF_ANCESTOR
:
4194 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4195 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4196 bp
= bitpack_create (ob
->main_stream
);
4197 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4198 streamer_write_bitpack (&bp
);
4202 count
= vec_safe_length (jump_func
->agg
.items
);
4203 streamer_write_uhwi (ob
, count
);
4206 bp
= bitpack_create (ob
->main_stream
);
4207 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4208 streamer_write_bitpack (&bp
);
4211 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4213 streamer_write_uhwi (ob
, item
->offset
);
4214 stream_write_tree (ob
, item
->value
, true);
4217 bp
= bitpack_create (ob
->main_stream
);
4218 bp_pack_value (&bp
, !!jump_func
->bits
, 1);
4219 streamer_write_bitpack (&bp
);
4220 if (jump_func
->bits
)
4222 streamer_write_widest_int (ob
, jump_func
->bits
->value
);
4223 streamer_write_widest_int (ob
, jump_func
->bits
->mask
);
4225 bp_pack_value (&bp
, !!jump_func
->m_vr
, 1);
4226 streamer_write_bitpack (&bp
);
4227 if (jump_func
->m_vr
)
4229 streamer_write_enum (ob
->main_stream
, value_rang_type
,
4230 VR_LAST
, jump_func
->m_vr
->kind ());
4231 stream_write_tree (ob
, jump_func
->m_vr
->min (), true);
4232 stream_write_tree (ob
, jump_func
->m_vr
->max (), true);
4236 /* Read in jump function JUMP_FUNC from IB. */
4239 ipa_read_jump_function (class lto_input_block
*ib
,
4240 struct ipa_jump_func
*jump_func
,
4241 struct cgraph_edge
*cs
,
4242 class data_in
*data_in
,
4245 enum jump_func_type jftype
;
4246 enum tree_code operation
;
4248 int val
= streamer_read_uhwi (ib
);
4249 bool flag
= val
& 1;
4251 jftype
= (enum jump_func_type
) (val
/ 2);
4254 case IPA_JF_UNKNOWN
:
4255 ipa_set_jf_unknown (jump_func
);
4259 tree t
= stream_read_tree (ib
, data_in
);
4260 if (flag
&& prevails
)
4261 t
= build_fold_addr_expr (t
);
4262 ipa_set_jf_constant (jump_func
, t
, cs
);
4265 case IPA_JF_PASS_THROUGH
:
4266 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4267 if (operation
== NOP_EXPR
)
4269 int formal_id
= streamer_read_uhwi (ib
);
4270 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4271 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4272 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4274 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4276 int formal_id
= streamer_read_uhwi (ib
);
4277 ipa_set_jf_unary_pass_through (jump_func
, formal_id
, operation
);
4281 tree operand
= stream_read_tree (ib
, data_in
);
4282 int formal_id
= streamer_read_uhwi (ib
);
4283 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4287 case IPA_JF_ANCESTOR
:
4289 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4290 int formal_id
= streamer_read_uhwi (ib
);
4291 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4292 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4293 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
);
4297 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4300 count
= streamer_read_uhwi (ib
);
4302 vec_alloc (jump_func
->agg
.items
, count
);
4305 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4306 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4308 for (i
= 0; i
< count
; i
++)
4310 struct ipa_agg_jf_item item
;
4311 item
.offset
= streamer_read_uhwi (ib
);
4312 item
.value
= stream_read_tree (ib
, data_in
);
4314 jump_func
->agg
.items
->quick_push (item
);
4317 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4318 bool bits_known
= bp_unpack_value (&bp
, 1);
4321 widest_int value
= streamer_read_widest_int (ib
);
4322 widest_int mask
= streamer_read_widest_int (ib
);
4324 ipa_set_jfunc_bits (jump_func
, value
, mask
);
4327 jump_func
->bits
= NULL
;
4329 struct bitpack_d vr_bp
= streamer_read_bitpack (ib
);
4330 bool vr_known
= bp_unpack_value (&vr_bp
, 1);
4333 enum value_range_kind type
= streamer_read_enum (ib
, value_range_kind
,
4335 tree min
= stream_read_tree (ib
, data_in
);
4336 tree max
= stream_read_tree (ib
, data_in
);
4338 ipa_set_jfunc_vr (jump_func
, type
, min
, max
);
4341 jump_func
->m_vr
= NULL
;
4344 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4345 relevant to indirect inlining to OB. */
4348 ipa_write_indirect_edge_info (struct output_block
*ob
,
4349 struct cgraph_edge
*cs
)
4351 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4352 struct bitpack_d bp
;
4354 streamer_write_hwi (ob
, ii
->param_index
);
4355 bp
= bitpack_create (ob
->main_stream
);
4356 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4357 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4358 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4359 bp_pack_value (&bp
, ii
->by_ref
, 1);
4360 bp_pack_value (&bp
, ii
->guaranteed_unmodified
, 1);
4361 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
4362 streamer_write_bitpack (&bp
);
4363 if (ii
->agg_contents
|| ii
->polymorphic
)
4364 streamer_write_hwi (ob
, ii
->offset
);
4366 gcc_assert (ii
->offset
== 0);
4368 if (ii
->polymorphic
)
4370 streamer_write_hwi (ob
, ii
->otr_token
);
4371 stream_write_tree (ob
, ii
->otr_type
, true);
4372 ii
->context
.stream_out (ob
);
4376 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4377 relevant to indirect inlining from IB. */
4380 ipa_read_indirect_edge_info (class lto_input_block
*ib
,
4381 class data_in
*data_in
,
4382 struct cgraph_edge
*cs
,
4383 class ipa_node_params
*info
)
4385 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4386 struct bitpack_d bp
;
4388 ii
->param_index
= (int) streamer_read_hwi (ib
);
4389 bp
= streamer_read_bitpack (ib
);
4390 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4391 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4392 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4393 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4394 ii
->guaranteed_unmodified
= bp_unpack_value (&bp
, 1);
4395 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
4396 if (ii
->agg_contents
|| ii
->polymorphic
)
4397 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4400 if (ii
->polymorphic
)
4402 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4403 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4404 ii
->context
.stream_in (ib
, data_in
);
4406 if (info
&& ii
->param_index
>= 0)
4408 if (ii
->polymorphic
)
4409 ipa_set_param_used_by_polymorphic_call (info
,
4410 ii
->param_index
, true);
4411 ipa_set_param_used_by_indirect_call (info
,
4412 ii
->param_index
, true);
4416 /* Stream out NODE info to OB. */
4419 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4422 lto_symtab_encoder_t encoder
;
4423 class ipa_node_params
*info
= IPA_NODE_REF (node
);
4425 struct cgraph_edge
*e
;
4426 struct bitpack_d bp
;
4428 encoder
= ob
->decl_state
->symtab_node_encoder
;
4429 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4430 streamer_write_uhwi (ob
, node_ref
);
4432 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4433 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4434 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4435 bp
= bitpack_create (ob
->main_stream
);
4436 gcc_assert (info
->analysis_done
4437 || ipa_get_param_count (info
) == 0);
4438 gcc_assert (!info
->node_enqueued
);
4439 gcc_assert (!info
->ipcp_orig_node
);
4440 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4441 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4442 streamer_write_bitpack (&bp
);
4443 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4445 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4446 stream_write_tree (ob
, ipa_get_type (info
, j
), true);
4448 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4450 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4454 streamer_write_uhwi (ob
, 0);
4458 streamer_write_uhwi (ob
,
4459 ipa_get_cs_argument_count (args
) * 2
4460 + (args
->polymorphic_call_contexts
!= NULL
));
4461 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4463 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4464 if (args
->polymorphic_call_contexts
!= NULL
)
4465 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4468 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4470 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4472 streamer_write_uhwi (ob
, 0);
4475 streamer_write_uhwi (ob
,
4476 ipa_get_cs_argument_count (args
) * 2
4477 + (args
->polymorphic_call_contexts
!= NULL
));
4478 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4480 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4481 if (args
->polymorphic_call_contexts
!= NULL
)
4482 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4485 ipa_write_indirect_edge_info (ob
, e
);
4489 /* Stream in edge E from IB. */
4492 ipa_read_edge_info (class lto_input_block
*ib
,
4493 class data_in
*data_in
,
4494 struct cgraph_edge
*e
, bool prevails
)
4496 int count
= streamer_read_uhwi (ib
);
4497 bool contexts_computed
= count
& 1;
4502 if (prevails
&& e
->possibly_call_in_translation_unit_p ())
4504 class ipa_edge_args
*args
= IPA_EDGE_REF_GET_CREATE (e
);
4505 vec_safe_grow_cleared (args
->jump_functions
, count
);
4506 if (contexts_computed
)
4507 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
);
4508 for (int k
= 0; k
< count
; k
++)
4510 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4512 if (contexts_computed
)
4513 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in
4519 for (int k
= 0; k
< count
; k
++)
4521 struct ipa_jump_func dummy
;
4522 ipa_read_jump_function (ib
, &dummy
, e
,
4524 if (contexts_computed
)
4526 class ipa_polymorphic_call_context ctx
;
4527 ctx
.stream_in (ib
, data_in
);
4533 /* Stream in NODE info from IB. */
4536 ipa_read_node_info (class lto_input_block
*ib
, struct cgraph_node
*node
,
4537 class data_in
*data_in
)
4540 struct cgraph_edge
*e
;
4541 struct bitpack_d bp
;
4542 bool prevails
= node
->prevailing_p ();
4543 class ipa_node_params
*info
= prevails
4544 ? IPA_NODE_REF_GET_CREATE (node
) : NULL
;
4546 int param_count
= streamer_read_uhwi (ib
);
4549 ipa_alloc_node_params (node
, param_count
);
4550 for (k
= 0; k
< param_count
; k
++)
4551 (*info
->descriptors
)[k
].move_cost
= streamer_read_uhwi (ib
);
4552 if (ipa_get_param_count (info
) != 0)
4553 info
->analysis_done
= true;
4554 info
->node_enqueued
= false;
4557 for (k
= 0; k
< param_count
; k
++)
4558 streamer_read_uhwi (ib
);
4560 bp
= streamer_read_bitpack (ib
);
4561 for (k
= 0; k
< param_count
; k
++)
4563 bool used
= bp_unpack_value (&bp
, 1);
4566 ipa_set_param_used (info
, k
, used
);
4568 for (k
= 0; k
< param_count
; k
++)
4570 int nuses
= streamer_read_hwi (ib
);
4571 tree type
= stream_read_tree (ib
, data_in
);
4575 ipa_set_controlled_uses (info
, k
, nuses
);
4576 (*info
->descriptors
)[k
].decl_or_type
= type
;
4579 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4580 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
4581 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4583 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
4584 ipa_read_indirect_edge_info (ib
, data_in
, e
, info
);
4588 /* Write jump functions for nodes in SET. */
4591 ipa_prop_write_jump_functions (void)
4593 struct cgraph_node
*node
;
4594 struct output_block
*ob
;
4595 unsigned int count
= 0;
4596 lto_symtab_encoder_iterator lsei
;
4597 lto_symtab_encoder_t encoder
;
4599 if (!ipa_node_params_sum
|| !ipa_edge_args_sum
)
4602 ob
= create_output_block (LTO_section_jump_functions
);
4603 encoder
= ob
->decl_state
->symtab_node_encoder
;
4605 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4606 lsei_next_function_in_partition (&lsei
))
4608 node
= lsei_cgraph_node (lsei
);
4609 if (node
->has_gimple_body_p ()
4610 && IPA_NODE_REF (node
) != NULL
)
4614 streamer_write_uhwi (ob
, count
);
4616 /* Process all of the functions. */
4617 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4618 lsei_next_function_in_partition (&lsei
))
4620 node
= lsei_cgraph_node (lsei
);
4621 if (node
->has_gimple_body_p ()
4622 && IPA_NODE_REF (node
) != NULL
)
4623 ipa_write_node_info (ob
, node
);
4625 streamer_write_char_stream (ob
->main_stream
, 0);
4626 produce_asm (ob
, NULL
);
4627 destroy_output_block (ob
);
4630 /* Read section in file FILE_DATA of length LEN with data DATA. */
4633 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4636 const struct lto_function_header
*header
=
4637 (const struct lto_function_header
*) data
;
4638 const int cfg_offset
= sizeof (struct lto_function_header
);
4639 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4640 const int string_offset
= main_offset
+ header
->main_size
;
4641 class data_in
*data_in
;
4645 lto_input_block
ib_main ((const char *) data
+ main_offset
,
4646 header
->main_size
, file_data
->mode_table
);
4649 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4650 header
->string_size
, vNULL
);
4651 count
= streamer_read_uhwi (&ib_main
);
4653 for (i
= 0; i
< count
; i
++)
4656 struct cgraph_node
*node
;
4657 lto_symtab_encoder_t encoder
;
4659 index
= streamer_read_uhwi (&ib_main
);
4660 encoder
= file_data
->symtab_node_encoder
;
4661 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
4663 gcc_assert (node
->definition
);
4664 ipa_read_node_info (&ib_main
, node
, data_in
);
4666 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4668 lto_data_in_delete (data_in
);
4671 /* Read ipcp jump functions. */
4674 ipa_prop_read_jump_functions (void)
4676 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4677 struct lto_file_decl_data
*file_data
;
4680 ipa_check_create_node_params ();
4681 ipa_check_create_edge_args ();
4682 ipa_register_cgraph_hooks ();
4684 while ((file_data
= file_data_vec
[j
++]))
4688 = lto_get_summary_section_data (file_data
, LTO_section_jump_functions
,
4691 ipa_prop_read_section (file_data
, data
, len
);
4696 write_ipcp_transformation_info (output_block
*ob
, cgraph_node
*node
)
4699 unsigned int count
= 0;
4700 lto_symtab_encoder_t encoder
;
4701 struct ipa_agg_replacement_value
*aggvals
, *av
;
4703 aggvals
= ipa_get_agg_replacements_for_node (node
);
4704 encoder
= ob
->decl_state
->symtab_node_encoder
;
4705 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4706 streamer_write_uhwi (ob
, node_ref
);
4708 for (av
= aggvals
; av
; av
= av
->next
)
4710 streamer_write_uhwi (ob
, count
);
4712 for (av
= aggvals
; av
; av
= av
->next
)
4714 struct bitpack_d bp
;
4716 streamer_write_uhwi (ob
, av
->offset
);
4717 streamer_write_uhwi (ob
, av
->index
);
4718 stream_write_tree (ob
, av
->value
, true);
4720 bp
= bitpack_create (ob
->main_stream
);
4721 bp_pack_value (&bp
, av
->by_ref
, 1);
4722 streamer_write_bitpack (&bp
);
4725 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
4726 if (ts
&& vec_safe_length (ts
->m_vr
) > 0)
4728 count
= ts
->m_vr
->length ();
4729 streamer_write_uhwi (ob
, count
);
4730 for (unsigned i
= 0; i
< count
; ++i
)
4732 struct bitpack_d bp
;
4733 ipa_vr
*parm_vr
= &(*ts
->m_vr
)[i
];
4734 bp
= bitpack_create (ob
->main_stream
);
4735 bp_pack_value (&bp
, parm_vr
->known
, 1);
4736 streamer_write_bitpack (&bp
);
4739 streamer_write_enum (ob
->main_stream
, value_rang_type
,
4740 VR_LAST
, parm_vr
->type
);
4741 streamer_write_wide_int (ob
, parm_vr
->min
);
4742 streamer_write_wide_int (ob
, parm_vr
->max
);
4747 streamer_write_uhwi (ob
, 0);
4749 if (ts
&& vec_safe_length (ts
->bits
) > 0)
4751 count
= ts
->bits
->length ();
4752 streamer_write_uhwi (ob
, count
);
4754 for (unsigned i
= 0; i
< count
; ++i
)
4756 const ipa_bits
*bits_jfunc
= (*ts
->bits
)[i
];
4757 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
4758 bp_pack_value (&bp
, !!bits_jfunc
, 1);
4759 streamer_write_bitpack (&bp
);
4762 streamer_write_widest_int (ob
, bits_jfunc
->value
);
4763 streamer_write_widest_int (ob
, bits_jfunc
->mask
);
4768 streamer_write_uhwi (ob
, 0);
/* Stream in the aggregate value replacement chain for NODE from IB.  */
4774 read_ipcp_transformation_info (lto_input_block
*ib
, cgraph_node
*node
,
4777 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4778 unsigned int count
, i
;
4780 count
= streamer_read_uhwi (ib
);
4781 for (i
= 0; i
<count
; i
++)
4783 struct ipa_agg_replacement_value
*av
;
4784 struct bitpack_d bp
;
4786 av
= ggc_alloc
<ipa_agg_replacement_value
> ();
4787 av
->offset
= streamer_read_uhwi (ib
);
4788 av
->index
= streamer_read_uhwi (ib
);
4789 av
->value
= stream_read_tree (ib
, data_in
);
4790 bp
= streamer_read_bitpack (ib
);
4791 av
->by_ref
= bp_unpack_value (&bp
, 1);
4795 ipa_set_node_agg_value_chain (node
, aggvals
);
4797 count
= streamer_read_uhwi (ib
);
4800 ipcp_transformation_initialize ();
4801 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
4802 vec_safe_grow_cleared (ts
->m_vr
, count
);
4803 for (i
= 0; i
< count
; i
++)
4806 parm_vr
= &(*ts
->m_vr
)[i
];
4807 struct bitpack_d bp
;
4808 bp
= streamer_read_bitpack (ib
);
4809 parm_vr
->known
= bp_unpack_value (&bp
, 1);
4812 parm_vr
->type
= streamer_read_enum (ib
, value_range_kind
,
4814 parm_vr
->min
= streamer_read_wide_int (ib
);
4815 parm_vr
->max
= streamer_read_wide_int (ib
);
4819 count
= streamer_read_uhwi (ib
);
4822 ipcp_transformation_initialize ();
4823 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
4824 vec_safe_grow_cleared (ts
->bits
, count
);
4826 for (i
= 0; i
< count
; i
++)
4828 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4829 bool known
= bp_unpack_value (&bp
, 1);
4833 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib
),
4834 streamer_read_widest_int (ib
));
4835 (*ts
->bits
)[i
] = bits
;
/* Write all aggregate replacements for nodes in set.  */
4844 ipcp_write_transformation_summaries (void)
4846 struct cgraph_node
*node
;
4847 struct output_block
*ob
;
4848 unsigned int count
= 0;
4849 lto_symtab_encoder_iterator lsei
;
4850 lto_symtab_encoder_t encoder
;
4852 ob
= create_output_block (LTO_section_ipcp_transform
);
4853 encoder
= ob
->decl_state
->symtab_node_encoder
;
4855 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4856 lsei_next_function_in_partition (&lsei
))
4858 node
= lsei_cgraph_node (lsei
);
4859 if (node
->has_gimple_body_p ())
4863 streamer_write_uhwi (ob
, count
);
4865 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4866 lsei_next_function_in_partition (&lsei
))
4868 node
= lsei_cgraph_node (lsei
);
4869 if (node
->has_gimple_body_p ())
4870 write_ipcp_transformation_info (ob
, node
);
4872 streamer_write_char_stream (ob
->main_stream
, 0);
4873 produce_asm (ob
, NULL
);
4874 destroy_output_block (ob
);
/* Read replacements section in file FILE_DATA of length LEN with data DATA.  */
4881 read_replacements_section (struct lto_file_decl_data
*file_data
,
4885 const struct lto_function_header
*header
=
4886 (const struct lto_function_header
*) data
;
4887 const int cfg_offset
= sizeof (struct lto_function_header
);
4888 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4889 const int string_offset
= main_offset
+ header
->main_size
;
4890 class data_in
*data_in
;
4894 lto_input_block
ib_main ((const char *) data
+ main_offset
,
4895 header
->main_size
, file_data
->mode_table
);
4897 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4898 header
->string_size
, vNULL
);
4899 count
= streamer_read_uhwi (&ib_main
);
4901 for (i
= 0; i
< count
; i
++)
4904 struct cgraph_node
*node
;
4905 lto_symtab_encoder_t encoder
;
4907 index
= streamer_read_uhwi (&ib_main
);
4908 encoder
= file_data
->symtab_node_encoder
;
4909 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
4911 gcc_assert (node
->definition
);
4912 read_ipcp_transformation_info (&ib_main
, node
, data_in
);
4914 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4916 lto_data_in_delete (data_in
);
/* Read IPA-CP aggregate replacements.  */
4922 ipcp_read_transformation_summaries (void)
4924 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4925 struct lto_file_decl_data
*file_data
;
4928 while ((file_data
= file_data_vec
[j
++]))
4932 = lto_get_summary_section_data (file_data
, LTO_section_ipcp_transform
,
4935 read_replacements_section (file_data
, data
, len
);
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */
4943 adjust_agg_replacement_values (struct cgraph_node
*node
,
4944 struct ipa_agg_replacement_value
*aggval
)
4946 struct ipa_agg_replacement_value
*v
;
4948 if (!node
->clone
.param_adjustments
)
4951 auto_vec
<int, 16> new_indices
;
4952 node
->clone
.param_adjustments
->get_updated_indices (&new_indices
);
4953 for (v
= aggval
; v
; v
= v
->next
)
4955 gcc_checking_assert (v
->index
>= 0);
4957 if ((unsigned) v
->index
< new_indices
.length ())
4958 v
->index
= new_indices
[v
->index
];
4960 /* This can happen if we know about a constant passed by reference by
4961 an argument which is never actually used for anything, let alone
4962 loading that constant. */
/* Dominator walker driving the ipcp modification phase.  */
4969 class ipcp_modif_dom_walker
: public dom_walker
4972 ipcp_modif_dom_walker (struct ipa_func_body_info
*fbi
,
4973 vec
<ipa_param_descriptor
, va_gc
> *descs
,
4974 struct ipa_agg_replacement_value
*av
,
4976 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
), m_descriptors (descs
),
4977 m_aggval (av
), m_something_changed (sc
), m_cfg_changed (cc
) {}
4979 virtual edge
before_dom_children (basic_block
);
4982 struct ipa_func_body_info
*m_fbi
;
4983 vec
<ipa_param_descriptor
, va_gc
> *m_descriptors
;
4984 struct ipa_agg_replacement_value
*m_aggval
;
4985 bool *m_something_changed
, *m_cfg_changed
;
4989 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
4991 gimple_stmt_iterator gsi
;
4992 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4994 struct ipa_agg_replacement_value
*v
;
4995 gimple
*stmt
= gsi_stmt (gsi
);
4997 HOST_WIDE_INT offset
;
5002 if (!gimple_assign_load_p (stmt
))
5004 rhs
= gimple_assign_rhs1 (stmt
);
5005 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
5010 while (handled_component_p (t
))
5012 /* V_C_E can do things like convert an array of integers to one
5013 bigger integer and similar things we do not handle below. */
5014 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
5019 t
= TREE_OPERAND (t
, 0);
5024 if (!ipa_load_from_parm_agg (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
5025 &offset
, &size
, &by_ref
))
5027 for (v
= m_aggval
; v
; v
= v
->next
)
5028 if (v
->index
== index
5029 && v
->offset
== offset
)
5032 || v
->by_ref
!= by_ref
5033 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
->value
))),
5037 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
5038 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
5040 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
5041 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
5042 else if (TYPE_SIZE (TREE_TYPE (rhs
))
5043 == TYPE_SIZE (TREE_TYPE (v
->value
)))
5044 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
5049 fprintf (dump_file
, " const ");
5050 print_generic_expr (dump_file
, v
->value
);
5051 fprintf (dump_file
, " can't be converted to type of ");
5052 print_generic_expr (dump_file
, rhs
);
5053 fprintf (dump_file
, "\n");
5061 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5063 fprintf (dump_file
, "Modifying stmt:\n ");
5064 print_gimple_stmt (dump_file
, stmt
, 0);
5066 gimple_assign_set_rhs_from_tree (&gsi
, val
);
5069 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5071 fprintf (dump_file
, "into:\n ");
5072 print_gimple_stmt (dump_file
, stmt
, 0);
5073 fprintf (dump_file
, "\n");
5076 *m_something_changed
= true;
5077 if (maybe_clean_eh_stmt (stmt
)
5078 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
5079 *m_cfg_changed
= true;
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */
5088 ipcp_update_bits (struct cgraph_node
*node
)
5090 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5092 if (!ts
|| vec_safe_length (ts
->bits
) == 0)
5094 vec
<ipa_bits
*, va_gc
> &bits
= *ts
->bits
;
5095 unsigned count
= bits
.length ();
5099 auto_vec
<int, 16> new_indices
;
5100 bool need_remapping
= false;
5101 if (node
->clone
.param_adjustments
)
5103 node
->clone
.param_adjustments
->get_updated_indices (&new_indices
);
5104 need_remapping
= true;
5106 auto_vec
<tree
, 16> parm_decls
;
5107 push_function_arg_decls (&parm_decls
, node
->decl
);
5109 for (unsigned i
= 0; i
< count
; ++i
)
5114 if (i
>= new_indices
.length ())
5116 int idx
= new_indices
[i
];
5119 parm
= parm_decls
[idx
];
5122 parm
= parm_decls
[i
];
5123 gcc_checking_assert (parm
);
5127 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm
))
5128 || POINTER_TYPE_P (TREE_TYPE (parm
)))
5129 || !is_gimple_reg (parm
))
5132 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5138 fprintf (dump_file
, "Adjusting mask for param %u to ", i
);
5139 print_hex (bits
[i
]->mask
, dump_file
);
5140 fprintf (dump_file
, "\n");
5143 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5145 unsigned prec
= TYPE_PRECISION (TREE_TYPE (ddef
));
5146 signop sgn
= TYPE_SIGN (TREE_TYPE (ddef
));
5148 wide_int nonzero_bits
= wide_int::from (bits
[i
]->mask
, prec
, UNSIGNED
)
5149 | wide_int::from (bits
[i
]->value
, prec
, sgn
);
5150 set_nonzero_bits (ddef
, nonzero_bits
);
5154 unsigned tem
= bits
[i
]->mask
.to_uhwi ();
5155 unsigned HOST_WIDE_INT bitpos
= bits
[i
]->value
.to_uhwi ();
5156 unsigned align
= tem
& -tem
;
5157 unsigned misalign
= bitpos
& (align
- 1);
5162 fprintf (dump_file
, "Adjusting align: %u, misalign: %u\n", align
, misalign
);
5164 unsigned old_align
, old_misalign
;
5165 struct ptr_info_def
*pi
= get_ptr_info (ddef
);
5166 bool old_known
= get_ptr_info_alignment (pi
, &old_align
, &old_misalign
);
5169 && old_align
> align
)
5173 fprintf (dump_file
, "But alignment was already %u.\n", old_align
);
5174 if ((old_misalign
& (align
- 1)) != misalign
)
5175 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5176 old_misalign
, misalign
);
5182 && ((misalign
& (old_align
- 1)) != old_misalign
)
5184 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5185 old_misalign
, misalign
);
5187 set_ptr_info_alignment (pi
, align
, misalign
);
5194 ipa_vr::nonzero_p (tree expr_type
) const
5196 if (type
== VR_ANTI_RANGE
&& wi::eq_p (min
, 0) && wi::eq_p (max
, 0))
5199 unsigned prec
= TYPE_PRECISION (expr_type
);
5200 return (type
== VR_RANGE
5201 && TYPE_UNSIGNED (expr_type
)
5202 && wi::eq_p (min
, wi::one (prec
))
5203 && wi::eq_p (max
, wi::max_value (prec
, TYPE_SIGN (expr_type
))));
/* Update value range of formal parameters as described in
   ipcp_transformation.  */
5210 ipcp_update_vr (struct cgraph_node
*node
)
5212 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5213 if (!ts
|| vec_safe_length (ts
->m_vr
) == 0)
5215 const vec
<ipa_vr
, va_gc
> &vr
= *ts
->m_vr
;
5216 unsigned count
= vr
.length ();
5220 auto_vec
<int, 16> new_indices
;
5221 bool need_remapping
= false;
5222 if (node
->clone
.param_adjustments
)
5224 node
->clone
.param_adjustments
->get_updated_indices (&new_indices
);
5225 need_remapping
= true;
5227 auto_vec
<tree
, 16> parm_decls
;
5228 push_function_arg_decls (&parm_decls
, node
->decl
);
5230 for (unsigned i
= 0; i
< count
; ++i
)
5236 if (i
>= new_indices
.length ())
5238 remapped_idx
= new_indices
[i
];
5239 if (remapped_idx
< 0)
5245 parm
= parm_decls
[remapped_idx
];
5247 gcc_checking_assert (parm
);
5248 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5250 if (!ddef
|| !is_gimple_reg (parm
))
5254 && (vr
[i
].type
== VR_RANGE
|| vr
[i
].type
== VR_ANTI_RANGE
))
5256 tree type
= TREE_TYPE (ddef
);
5257 unsigned prec
= TYPE_PRECISION (type
);
5258 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5262 fprintf (dump_file
, "Setting value range of param %u "
5263 "(now %i) ", i
, remapped_idx
);
5264 fprintf (dump_file
, "%s[",
5265 (vr
[i
].type
== VR_ANTI_RANGE
) ? "~" : "");
5266 print_decs (vr
[i
].min
, dump_file
);
5267 fprintf (dump_file
, ", ");
5268 print_decs (vr
[i
].max
, dump_file
);
5269 fprintf (dump_file
, "]\n");
5271 set_range_info (ddef
, vr
[i
].type
,
5272 wide_int_storage::from (vr
[i
].min
, prec
,
5274 wide_int_storage::from (vr
[i
].max
, prec
,
5277 else if (POINTER_TYPE_P (TREE_TYPE (ddef
))
5278 && vr
[i
].nonzero_p (TREE_TYPE (ddef
)))
5281 fprintf (dump_file
, "Setting nonnull for %u\n", i
);
5282 set_ptr_nonnull (ddef
);
/* IPCP transformation phase doing propagation of aggregate values.  */
5291 ipcp_transform_function (struct cgraph_node
*node
)
5293 vec
<ipa_param_descriptor
, va_gc
> *descriptors
= NULL
;
5294 struct ipa_func_body_info fbi
;
5295 struct ipa_agg_replacement_value
*aggval
;
5297 bool cfg_changed
= false, something_changed
= false;
5299 gcc_checking_assert (cfun
);
5300 gcc_checking_assert (current_function_decl
);
5303 fprintf (dump_file
, "Modification phase of node %s\n",
5304 node
->dump_name ());
5306 ipcp_update_bits (node
);
5307 ipcp_update_vr (node
);
5308 aggval
= ipa_get_agg_replacements_for_node (node
);
5311 param_count
= count_formal_params (node
->decl
);
5312 if (param_count
== 0)
5314 adjust_agg_replacement_values (node
, aggval
);
5316 ipa_dump_agg_replacement_values (dump_file
, aggval
);
5320 fbi
.bb_infos
= vNULL
;
5321 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
5322 fbi
.param_count
= param_count
;
5323 fbi
.aa_walk_budget
= param_ipa_max_aa_steps
;
5325 vec_safe_grow_cleared (descriptors
, param_count
);
5326 ipa_populate_param_decls (node
, *descriptors
);
5327 calculate_dominance_info (CDI_DOMINATORS
);
5328 ipcp_modif_dom_walker (&fbi
, descriptors
, aggval
, &something_changed
,
5329 &cfg_changed
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5332 struct ipa_bb_info
*bi
;
5333 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
5334 free_ipa_bb_info (bi
);
5335 fbi
.bb_infos
.release ();
5336 free_dominance_info (CDI_DOMINATORS
);
5338 ipcp_transformation
*s
= ipcp_transformation_sum
->get (node
);
5339 s
->agg_values
= NULL
;
5343 vec_free (descriptors
);
5345 if (!something_changed
)
5349 delete_unreachable_blocks_update_callgraph (node
, false);
5351 return TODO_update_ssa_only_virtuals
;
/* Return true if OTHER describes same agg item.  */
5357 ipa_agg_jf_item::equal_to (const ipa_agg_jf_item
&other
)
5359 return offset
== other
.offset
5360 && operand_equal_p (value
, other
.value
, 0);
5362 #include "gt-ipa-prop.h"