1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "hard-reg-set.h"
33 #include "dominance.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
44 #include "stor-layout.h"
45 #include "print-tree.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "langhooks.h"
53 #include "plugin-api.h"
56 #include "alloc-pool.h"
59 #include "gimple-ssa.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "tree-into-ssa.h"
65 #include "tree-pass.h"
66 #include "tree-inline.h"
67 #include "ipa-inline.h"
69 #include "diagnostic.h"
70 #include "gimple-pretty-print.h"
71 #include "lto-streamer.h"
72 #include "data-streamer.h"
73 #include "tree-streamer.h"
75 #include "ipa-utils.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
83 /* Intermediate information that we get from alias analysis about a particular
84 parameter in a particular basic_block. When a parameter or the memory it
85 references is marked modified, we use that information in all dominated
86 blocks without consulting the alias analysis oracle. */
88 struct param_aa_status
90 /* Set when this structure contains meaningful information. If not, the
91 structure describing a dominating BB should be used instead. */
94 /* Whether we have seen something which might have modified the data in
95 question. PARM is for the parameter itself, REF is for data it points to
96 but using the alias type of individual accesses and PT is the same thing
97 but for computing aggregate pass-through functions using a very inclusive
alias type.  */
99 bool parm_modified
, ref_modified
, pt_modified
;
102 /* Information related to a given BB that is used only when looking at function
body.  NOTE(review): the `struct ipa_bb_info` header line is missing from this
excerpt — verify the struct boundary against the full source.  */
107 /* Call graph edges going out of this BB. */
108 vec
<cgraph_edge
*> cg_edges
;
109 /* Alias analysis statuses of each formal parameter at this bb. */
110 vec
<param_aa_status
> param_aa_statuses
;
113 /* Structure with global information that is only used when looking at function
body.  */
116 struct func_body_info
118 /* The node that is being analyzed. */
122 struct ipa_node_params
*info
;
124 /* Information about individual BBs. */
125 vec
<ipa_bb_info
> bb_infos
;
127 /* Number of parameters. */
130 /* Number of statements already walked when analyzing this function; used to
cap the cost of alias-analysis walks (see aa_overwalked below).  */
131 unsigned int aa_walked
;
134 /* Vector where the parameter infos are actually stored. */
135 vec
<ipa_node_params
> ipa_node_params_vector
;
136 /* Vector of known aggregate values in cloned nodes. */
137 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
138 /* Vector where the per-edge argument (jump function) infos are actually
stored.  */
139 vec
<ipa_edge_args
, va_gc
> *ipa_edge_args_vector
;
141 /* Holders of ipa cgraph hooks: */
142 static struct cgraph_edge_hook_list
*edge_removal_hook_holder
;
143 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
144 static struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
145 static struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
146 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
148 /* Description of a reference to an IPA constant. */
149 struct ipa_cst_ref_desc
151 /* Edge that corresponds to the statement which took the reference. */
152 struct cgraph_edge
*cs
;
153 /* Linked list of duplicates created when call graph edges are cloned. */
154 struct ipa_cst_ref_desc
*next_duplicate
;
155 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
156 is out of control. */
160 /* Allocation pool for reference descriptions. */
162 static alloc_pool ipa_refdesc_pool
;
164 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
165 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
168 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
170 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
171 struct cl_optimization
*os
;
/* NOTE(review): this excerpt is lossily sampled — the NULL check on fs_opts
that should precede this use is missing; verify against the full source.  */
175 os
= TREE_OPTIMIZATION (fs_opts
);
176 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
179 /* Return index of the formal whose tree is PTREE in function which corresponds
to the given DESCRIPTORS (presumably -1 when not found — the return
statements are missing from this excerpt; confirm against full source).  */
183 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
> descriptors
, tree ptree
)
/* Linear scan over the parameter descriptors comparing the stored decl.  */
187 count
= descriptors
.length ();
188 for (i
= 0; i
< count
; i
++)
189 if (descriptors
[i
].decl
== ptree
)
195 /* Return index of the formal whose tree is PTREE in function which corresponds
to INFO.  Thin wrapper around ipa_get_param_decl_index_1.  */
199 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
201 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
204 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
NODE.  */
208 ipa_populate_param_decls (struct cgraph_node
*node
,
209 vec
<ipa_param_descriptor
> &descriptors
)
/* NOTE(review): fndecl/param_num setup lines are missing from this excerpt.  */
217 gcc_assert (gimple_has_body_p (fndecl
));
218 fnargs
= DECL_ARGUMENTS (fndecl
);
/* Walk the PARM_DECL chain, recording each decl and its move cost.  */
220 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
222 descriptors
[param_num
].decl
= parm
;
223 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
),
229 /* Return how many formal parameters FNDECL has. */
232 count_formal_params (tree fndecl
)
236 gcc_assert (gimple_has_body_p (fndecl
));
/* Counting body of the loop is missing from this excerpt.  */
238 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
244 /* Return the declaration of Ith formal parameter of the function corresponding
245 to INFO. Note there is no setter function as this array is built just once
246 using ipa_initialize_node_params. */
/* NOTE(review): the comment above appears to belong to an accessor (ipa_get_param?)
that was dropped by the sampling; the function below merely dumps parameter I
of INFO to FILE.  */
249 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
251 fprintf (file
, "param #%i", i
);
252 if (info
->descriptors
[i
].decl
)
255 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
259 /* Initialize the ipa_node_params structure associated with NODE
260 to hold PARAM_COUNT parameters. */
263 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
265 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
/* Only grow the descriptor vector once, and only if there is anything
to describe.  */
267 if (!info
->descriptors
.exists () && param_count
)
268 info
->descriptors
.safe_grow_cleared (param_count
);
271 /* Initialize the ipa_node_params structure associated with NODE by counting
272 the function parameters, creating the descriptors and populating their
param_decls.  */
276 ipa_initialize_node_params (struct cgraph_node
*node
)
278 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
/* Idempotent: do nothing if the descriptors already exist.  */
280 if (!info
->descriptors
.exists ())
282 ipa_alloc_node_params (node
, count_formal_params (node
->decl
));
283 ipa_populate_param_decls (node
, info
->descriptors
);
287 /* Print the jump functions associated with call graph edge CS to file F. */
/* NOTE(review): this excerpt is lossily sampled (braces and some statements
are missing); code left byte-identical.  */
290 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
294 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
/* One dump section per actual argument of the call.  */
295 for (i
= 0; i
< count
; i
++)
297 struct ipa_jump_func
*jump_func
;
298 enum jump_func_type type
;
300 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
301 type
= jump_func
->type
;
303 fprintf (f
, " param %d: ", i
);
304 if (type
== IPA_JF_UNKNOWN
)
305 fprintf (f
, "UNKNOWN\n");
306 else if (type
== IPA_JF_CONST
)
308 tree val
= jump_func
->value
.constant
.value
;
309 fprintf (f
, "CONST: ");
310 print_generic_expr (f
, val
, 0);
/* For addresses of CONST_DECLs also dump the constant's initializer.  */
311 if (TREE_CODE (val
) == ADDR_EXPR
312 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
315 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
320 else if (type
== IPA_JF_PASS_THROUGH
)
322 fprintf (f
, "PASS THROUGH: ");
323 fprintf (f
, "%d, op %s",
324 jump_func
->value
.pass_through
.formal_id
,
325 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
/* Arithmetic pass-throughs also carry an operand worth printing.  */
326 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
329 print_generic_expr (f
,
330 jump_func
->value
.pass_through
.operand
, 0);
332 if (jump_func
->value
.pass_through
.agg_preserved
)
333 fprintf (f
, ", agg_preserved");
336 else if (type
== IPA_JF_ANCESTOR
)
338 fprintf (f
, "ANCESTOR: ");
339 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
,
340 jump_func
->value
.ancestor
.formal_id
,
341 jump_func
->value
.ancestor
.offset
);
342 if (jump_func
->value
.ancestor
.agg_preserved
)
343 fprintf (f
, ", agg_preserved");
/* Aggregate part of the jump function, if any.  */
347 if (jump_func
->agg
.items
)
349 struct ipa_agg_jf_item
*item
;
352 fprintf (f
, " Aggregate passed by %s:\n",
353 jump_func
->agg
.by_ref
? "reference" : "value");
354 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
356 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
358 if (TYPE_P (item
->value
))
359 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
360 tree_to_uhwi (TYPE_SIZE (item
->value
)));
363 fprintf (f
, "cst: ");
364 print_generic_expr (f
, item
->value
, 0);
370 struct ipa_polymorphic_call_context
*ctx
371 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs
), i
);
372 if (ctx
&& !ctx
->useless_p ())
374 fprintf (f
, " Context: ");
/* NOTE(review): dumps to dump_file rather than the F argument used by
every other statement here — looks inconsistent; confirm intent.  */
375 ctx
->dump (dump_file
);
381 /* Print the jump functions of all arguments on all call graph edges going from
NODE to file F.  NOTE(review): this excerpt is lossily sampled; code left
byte-identical.  */
385 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
387 struct cgraph_edge
*cs
;
389 fprintf (f
, " Jump functions of caller %s/%i:\n", node
->name (),
/* Direct callees first.  */
391 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
393 if (!ipa_edge_args_info_available_for_edge_p (cs
))
396 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
397 xstrdup (node
->name ()), node
->order
,
398 xstrdup (cs
->callee
->name ()),
400 ipa_print_node_jump_functions_for_edge (f
, cs
);
/* Then indirect call sites.  */
403 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
405 struct cgraph_indirect_call_info
*ii
;
406 if (!ipa_edge_args_info_available_for_edge_p (cs
))
409 ii
= cs
->indirect_info
;
410 if (ii
->agg_contents
)
411 fprintf (f
, " indirect %s callsite, calling param %i, "
412 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
413 ii
->member_ptr
? "member ptr" : "aggregate",
414 ii
->param_index
, ii
->offset
,
415 ii
->by_ref
? "by reference" : "by_value");
417 fprintf (f
, " indirect %s callsite, calling param %i, "
418 "offset " HOST_WIDE_INT_PRINT_DEC
,
419 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
424 fprintf (f
, ", for stmt ");
425 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
430 ii
->context
.dump (f
);
431 ipa_print_node_jump_functions_for_edge (f
, cs
);
435 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
438 ipa_print_all_jump_functions (FILE *f
)
440 struct cgraph_node
*node
;
442 fprintf (f
, "\nJump functions:\n");
/* Iterate over every function in the call graph.  */
443 FOR_EACH_FUNCTION (node
)
445 ipa_print_node_jump_functions (f
, node
);
449 /* Set JFUNC to be a copy of another constant jump function (to be used by jump
450 function combination code). The two functions will share their rdesc. */
453 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
454 struct ipa_jump_func
*src
)
457 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
458 dst
->type
= IPA_JF_CONST
;
/* Struct copy — the rdesc pointer inside is shared, not duplicated.  */
459 dst
->value
.constant
= src
->value
.constant
;
462 /* Set JFUNC to be a constant jump function carrying CONSTANT, taken at call
graph edge CS.  NOTE(review): excerpt is lossily sampled; code left
byte-identical.  */
465 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
466 struct cgraph_edge
*cs
)
468 constant
= unshare_expr (constant
);
469 if (constant
&& EXPR_P (constant
))
470 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
471 jfunc
->type
= IPA_JF_CONST
;
472 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
/* Addresses of functions get a reference descriptor so that the reference
can be tracked/removed when edges are redirected or cloned.  */
474 if (TREE_CODE (constant
) == ADDR_EXPR
475 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
477 struct ipa_cst_ref_desc
*rdesc
;
/* Lazily create the allocation pool on first use.  */
478 if (!ipa_refdesc_pool
)
479 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
480 sizeof (struct ipa_cst_ref_desc
), 32);
482 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
484 rdesc
->next_duplicate
= NULL
;
486 jfunc
->value
.constant
.rdesc
= rdesc
;
489 jfunc
->value
.constant
.rdesc
= NULL
;
492 /* Set JFUNC to be a simple pass-through jump function. */
494 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
/* NOP_EXPR operation and NULL operand mark the "simple" (no arithmetic)
variant of a pass-through.  */
497 jfunc
->type
= IPA_JF_PASS_THROUGH
;
498 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
499 jfunc
->value
.pass_through
.formal_id
= formal_id
;
500 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
501 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
504 /* Set JFUNC to be an arithmetic pass through jump function. */
507 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
508 tree operand
, enum tree_code operation
)
510 jfunc
->type
= IPA_JF_PASS_THROUGH
;
511 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
512 jfunc
->value
.pass_through
.formal_id
= formal_id
;
513 jfunc
->value
.pass_through
.operation
= operation
;
/* Arithmetic on the value cannot preserve the aggregate contents.  */
514 jfunc
->value
.pass_through
.agg_preserved
= false;
517 /* Set JFUNC to be an ancestor jump function: the value is the FORMAL_ID-th
parameter offset by OFFSET bits.  */
520 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
521 int formal_id
, bool agg_preserved
)
523 jfunc
->type
= IPA_JF_ANCESTOR
;
524 jfunc
->value
.ancestor
.formal_id
= formal_id
;
525 jfunc
->value
.ancestor
.offset
= offset
;
526 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
529 /* Get IPA BB information about the given BB. FBI is the context of analysis
530 of this function body. */
532 static struct ipa_bb_info
*
533 ipa_get_bb_info (struct func_body_info
*fbi
, basic_block bb
)
535 gcc_checking_assert (fbi
);
/* bb_infos is indexed by basic block index.  */
536 return &fbi
->bb_infos
[bb
->index
];
539 /* Structure to be passed in between detect_type_change and
540 check_stmt_for_type_change. */
542 struct prop_type_change_info
544 /* Offset into the object where there is the virtual method pointer we are
looking for.  */
546 HOST_WIDE_INT offset
;
547 /* The declaration or SSA_NAME pointer of the base that we are checking for
type change.  */
550 /* Set to true if dynamic type change has been detected. */
551 bool type_maybe_changed
;
554 /* Return true if STMT can modify a virtual method table pointer.
556 This function makes special assumptions about both constructors and
557 destructors which are all the functions that are allowed to alter the VMT
558 pointers. It assumes that destructors begin with assignment into all VMT
559 pointers and that constructors essentially look in the following way:
561 1) The very first thing they do is that they call constructors of ancestor
562 sub-objects that have them.
564 2) Then VMT pointers of this and all its ancestors is set to new values
565 corresponding to the type corresponding to the constructor.
567 3) Only afterwards, other stuff such as constructor of member sub-objects
568 and the code written by the user is run. Only this may include calling
569 virtual functions, directly or indirectly.
571 There is no way to call a constructor of an ancestor sub-object in any
574 This means that we do not have to care whether constructors get the correct
575 type information because they will always change it (in fact, if we define
576 the type to be given by the VMT pointer, it is undefined).
578 The most important fact to derive from the above is that if, for some
579 statement in the section 3, we try to detect whether the dynamic type has
580 changed, we can safely ignore all calls as we examine the function body
581 backwards until we reach statements in section 2 because these calls cannot
582 be ancestor constructors or destructors (if the input is not bogus) and so
583 do not change the dynamic type (this holds true only for automatically
584 allocated objects but at the moment we devirtualize only these). We then
585 must detect that statements in section 2 change the dynamic type and can try
586 to derive the new type. That is enough and we can stop, we will never see
587 the calls into constructors of sub-objects in this code. Therefore we can
588 safely ignore all call statements that we traverse.  */
592 stmt_may_be_vtbl_ptr_store (gimple stmt
)
/* Calls are ignored per the reasoning in the comment above.  */
594 if (is_gimple_call (stmt
))
596 if (gimple_clobber_p (stmt
))
598 else if (is_gimple_assign (stmt
))
600 tree lhs
= gimple_assign_lhs (stmt
);
/* Scalar, non-pointer stores cannot be a vtable pointer store.  */
602 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
604 if (flag_strict_aliasing
605 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
/* A store to a non-virtual field is also harmless.  */
608 if (TREE_CODE (lhs
) == COMPONENT_REF
609 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
611 /* In the future we might want to use get_base_ref_and_offset to find
612 if there is a field corresponding to the offset and if so, proceed
613 almost like if it was a component ref. */
619 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
620 to check whether a particular statement may modify the virtual table
621 pointer. It stores its result into DATA, which points to a
622 prop_type_change_info structure. */
625 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
627 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
628 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
/* Any potential vtable-pointer store flags the type as possibly changed.  */
630 if (stmt_may_be_vtbl_ptr_store (stmt
))
632 tci
->type_maybe_changed
= true;
639 /* See if ARG is PARAM_DECL describing instance passed by pointer
640 or reference in FUNCTION. Return false if the dynamic type may change
641 in between beginning of the function until CALL is invoked.
NOTE(review): the sentence above appears inverted relative to the function
name (param_type_may_change_p) — confirm the intended polarity against the
full source.
643 Generally functions are not allowed to change type of such instances,
644 but they call destructors. We assume that methods can not destroy the THIS
645 pointer. Also as special cases, constructors and destructors may change
646 type of the THIS pointer. */
649 param_type_may_change_p (tree function
, tree arg
, gimple call
)
651 /* Pure functions can not do any changes on the dynamic type;
652 that requires writing to memory. */
653 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
655 /* We need to check if we are within inlined constructor
656 or destructor (ideally we would have way to check that the
657 inline cdtor is actually working on ARG, but we don't have
658 easy tie on this, so punt on all non-pure cdtors.
659 We may also record the types of cdtors and once we know type
660 of the instance match them.
662 Also code unification optimizations may merge calls from
663 different blocks making return values unreliable. So
664 do nothing during late optimization. */
665 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
667 if (TREE_CODE (arg
) == SSA_NAME
668 && SSA_NAME_IS_DEFAULT_DEF (arg
)
669 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
671 /* Normal (non-THIS) argument. */
672 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
673 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
674 /* THIS pointer of a method - here we want to watch constructors
675 and destructors as those definitely may change the dynamic
type.  */
677 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
678 && !DECL_CXX_CONSTRUCTOR_P (function
)
679 && !DECL_CXX_DESTRUCTOR_P (function
)
680 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
682 /* Walk the inline stack and watch out for ctors/dtors. */
683 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
684 block
= BLOCK_SUPERCONTEXT (block
))
685 if (BLOCK_ABSTRACT_ORIGIN (block
)
686 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block
)) == FUNCTION_DECL
)
688 tree fn
= BLOCK_ABSTRACT_ORIGIN (block
);
/* Pure/const inline frames cannot write memory, hence cannot change
the dynamic type.  */
690 if (flags_from_decl_or_type (fn
) & (ECF_PURE
| ECF_CONST
))
692 if (TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
693 && (DECL_CXX_CONSTRUCTOR_P (fn
)
694 || DECL_CXX_DESTRUCTOR_P (fn
)))
703 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
704 callsite CALL) by looking for assignments to its virtual table pointer. If
705 it is, return true and fill in the jump function JFUNC with relevant type
706 information or set it to unknown. ARG is the object itself (not a pointer
707 to it, unless dereferenced). BASE is the base of the memory access as
708 returned by get_ref_base_and_extent, as is the offset.
710 This is a helper function for detect_type_change and detect_type_change_ssa
711 that does the heavy work which is usually unnecessary. */
714 detect_type_change_from_memory_writes (tree arg
, tree base
, tree comp_type
,
715 gimple call
, struct ipa_jump_func
*jfunc
,
716 HOST_WIDE_INT offset
)
718 struct prop_type_change_info tci
;
720 bool entry_reached
= false;
722 gcc_checking_assert (DECL_P (arg
)
723 || TREE_CODE (arg
) == MEM_REF
724 || handled_component_p (arg
));
726 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
728 /* Const calls cannot call virtual methods through VMT and so type changes do
not matter.  */
730 if (!flag_devirtualize
|| !gimple_vuse (call
)
731 /* Be sure expected_type is polymorphic. */
733 || TREE_CODE (comp_type
) != RECORD_TYPE
734 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
735 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
738 ao_ref_init (&ao
, arg
);
/* Restrict the alias oracle query to the vtable-pointer-sized slot.  */
741 ao
.size
= POINTER_SIZE
;
742 ao
.max_size
= ao
.size
;
745 tci
.object
= get_base_address (arg
);
746 tci
.type_maybe_changed
= false;
/* Walk all virtual definitions reaching CALL, flagging potential vtable
pointer stores via check_stmt_for_type_change.  */
748 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
749 &tci
, NULL
, &entry_reached
);
750 if (!tci
.type_maybe_changed
)
753 jfunc
->type
= IPA_JF_UNKNOWN
;
757 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
758 If it is, return true and fill in the jump function JFUNC with relevant type
759 information or set it to unknown. ARG is the object itself (not a pointer
760 to it, unless dereferenced). BASE is the base of the memory access as
761 returned by get_ref_base_and_extent, as is the offset. */
764 detect_type_change (tree arg
, tree base
, tree comp_type
, gimple call
,
765 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
767 if (!flag_devirtualize
)
/* If the parameter's type provably cannot change, skip the expensive
memory-write scan.  */
770 if (TREE_CODE (base
) == MEM_REF
771 && !param_type_may_change_p (current_function_decl
,
772 TREE_OPERAND (base
, 0),
775 return detect_type_change_from_memory_writes (arg
, base
, comp_type
,
776 call
, jfunc
, offset
);
779 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
780 SSA name (its dereference will become the base and the offset is assumed to
be zero).  */
784 detect_type_change_ssa (tree arg
, tree comp_type
,
785 gimple call
, struct ipa_jump_func
*jfunc
)
787 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
788 if (!flag_devirtualize
789 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
792 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
/* Build a zero-offset MEM_REF so the pointer can be treated as the
dereferenced object by the helper below.  */
795 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
796 build_int_cst (ptr_type_node
, 0));
798 return detect_type_change_from_memory_writes (arg
, arg
, comp_type
,
802 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
803 boolean variable pointed to by DATA. */
806 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
/* NOTE(review): the `*b = true; return true;` tail is missing from this
lossily-sampled excerpt.  */
809 bool *b
= (bool *) data
;
814 /* Return true if we have already walked so many statements in AA that we
815 should really just start giving up. */
818 aa_overwalked (struct func_body_info
*fbi
)
820 gcc_checking_assert (fbi
);
/* Compare accumulated walk count against the tunable AA step budget.  */
821 return fbi
->aa_walked
> (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS
);
824 /* Find the nearest valid aa status for parameter specified by INDEX that
dominates BB.  */
827 static struct param_aa_status
*
828 find_dominating_aa_status (struct func_body_info
*fbi
, basic_block bb
,
/* Walk up the dominator tree until a BB with a valid status is found
(loop structure partially missing from this excerpt).  */
833 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
836 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
837 if (!bi
->param_aa_statuses
.is_empty ()
838 && bi
->param_aa_statuses
[index
].valid
)
839 return &bi
->param_aa_statuses
[index
];
843 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
844 structures and/or initialize the result with a dominating description as
necessary.  */
847 static struct param_aa_status
*
848 parm_bb_aa_status_for_bb (struct func_body_info
*fbi
, basic_block bb
,
851 gcc_checking_assert (fbi
);
852 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
/* Lazily allocate the per-BB status vector.  */
853 if (bi
->param_aa_statuses
.is_empty ())
854 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
);
855 struct param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
/* A freshly-cleared entry must have no modification flags set.  */
858 gcc_checking_assert (!paa
->parm_modified
859 && !paa
->ref_modified
860 && !paa
->pt_modified
);
861 struct param_aa_status
*dom_paa
;
862 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
872 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
873 a value known not to be modified in this function before reaching the
874 statement STMT. FBI holds information about the function we have so far
875 gathered but do not survive the summary building stage. */
878 parm_preserved_before_stmt_p (struct func_body_info
*fbi
, int index
,
879 gimple stmt
, tree parm_load
)
881 struct param_aa_status
*paa
;
882 bool modified
= false;
885 /* FIXME: FBI can be NULL if we are being called from outside
886 ipa_node_analysis or ipcp_transform_function, which currently happens
887 during inlining analysis. It would be great to extend fbi's lifetime and
888 always have it. Currently, we are just not afraid of too much walking in
that case.  */
892 if (aa_overwalked (fbi
))
/* Use the cached per-BB status when available.  */
894 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
895 if (paa
->parm_modified
)
901 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
902 ao_ref_init (&refd
, parm_load
);
903 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
/* Account the walk against the AA step budget.  */
906 fbi
->aa_walked
+= walked
;
908 paa
->parm_modified
= true;
912 /* If STMT is an assignment that loads a value from a parameter declaration,
913 return the index of the parameter in ipa_node_params which has not been
914 modified. Otherwise return -1. */
917 load_from_unmodified_param (struct func_body_info
*fbi
,
918 vec
<ipa_param_descriptor
> descriptors
,
924 if (!gimple_assign_single_p (stmt
))
927 op1
= gimple_assign_rhs1 (stmt
);
/* Only direct loads from a PARM_DECL qualify.  */
928 if (TREE_CODE (op1
) != PARM_DECL
)
931 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
933 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
939 /* Return true if memory reference REF (which must be a load through parameter
940 with INDEX) loads data that are known to be unmodified in this function
941 before reaching statement STMT. */
944 parm_ref_data_preserved_p (struct func_body_info
*fbi
,
945 int index
, gimple stmt
, tree ref
)
947 struct param_aa_status
*paa
;
948 bool modified
= false;
951 /* FIXME: FBI can be NULL if we are being called from outside
952 ipa_node_analysis or ipcp_transform_function, which currently happens
953 during inlining analysis. It would be great to extend fbi's lifetime and
954 always have it. Currently, we are just not afraid of too much walking in
that case.  */
958 if (aa_overwalked (fbi
))
960 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
/* Cached negative answer short-circuits the walk.  */
961 if (paa
->ref_modified
)
967 gcc_checking_assert (gimple_vuse (stmt
));
968 ao_ref_init (&refd
, ref
);
969 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
972 fbi
->aa_walked
+= walked
;
974 paa
->ref_modified
= true;
978 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
979 is known to be unmodified in this function before reaching call statement
980 CALL into which it is passed. FBI describes the function body. */
983 parm_ref_data_pass_through_p (struct func_body_info
*fbi
, int index
,
984 gimple call
, tree parm
)
986 bool modified
= false;
989 /* It's unnecessary to calculate anything about memory contents for a const
990 function because it is not going to use it. But do not cache the result
991 either. Also, no such calculations for non-pointers. */
992 if (!gimple_vuse (call
)
993 || !POINTER_TYPE_P (TREE_TYPE (parm
))
994 || aa_overwalked (fbi
))
997 struct param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (call
),
999 if (paa
->pt_modified
)
/* Query covers everything reachable through the pointer (unknown size).  */
1002 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1003 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1005 fbi
->aa_walked
+= walked
;
1007 paa
->pt_modified
= true;
1011 /* Return true if we can prove that OP is a memory reference loading unmodified
1012 data from an aggregate passed as a parameter and if the aggregate is passed
1013 by reference, that the alias type of the load corresponds to the type of the
1014 formal parameter (so that we can rely on this type for TBAA in callers).
1015 INFO and PARMS_AINFO describe parameters of the current function (but the
1016 latter can be NULL), STMT is the load statement. If function returns true,
1017 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1018 within the aggregate and whether it is a load from a value passed by
1019 reference respectively. */
1022 ipa_load_from_parm_agg_1 (struct func_body_info
*fbi
,
1023 vec
<ipa_param_descriptor
> descriptors
,
1024 gimple stmt
, tree op
, int *index_p
,
1025 HOST_WIDE_INT
*offset_p
, HOST_WIDE_INT
*size_p
,
1029 HOST_WIDE_INT size
, max_size
;
1030 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
)
;
/* Bail out on variable-sized or negative-offset accesses.  */
1032 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
/* Case 1: the aggregate is passed by value; BASE is the PARM_DECL itself.  */
1037 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1039 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
/* Case 2: the aggregate is passed by reference; BASE must be a plain
dereference of an SSA pointer.  */
1050 if (TREE_CODE (base
) != MEM_REF
1051 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1052 || !integer_zerop (TREE_OPERAND (base
, 1)))
1055 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1057 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1058 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1062 /* This branch catches situations where a pointer parameter is not a
1063 gimple register, for example:
1065 void hip7(S*) (struct S * p)
1067 void (*<T2e4>) (struct S *) D.1867;
1072 D.1867_2 = p.1_1->f;  */
1077 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1078 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1082 && parm_ref_data_preserved_p (fbi
, index
, stmt
, op
))
1093 /* Just like the previous function, just without the param_analysis_info
1094 pointer, for users outside of this file. */
1097 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
1098 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
/* NULL FBI means no AA caching context (see FIXME notes in the helpers).  */
1101 return ipa_load_from_parm_agg_1 (NULL
, info
->descriptors
, stmt
, op
, index_p
,
1102 offset_p
, NULL
, by_ref_p
);
1105 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1106 of an assignment statement STMT, try to determine whether we are actually
1107 handling any of the following cases and construct an appropriate jump
1108 function into JFUNC if so:
1110 1) The passed value is loaded from a formal parameter which is not a gimple
1111 register (most probably because it is addressable, the value has to be
1112 scalar) and we can guarantee the value has not changed. This case can
1113 therefore be described by a simple pass-through jump function. For example:
1122 2) The passed value can be described by a simple arithmetic pass-through
1129 D.2064_4 = a.1(D) + 4;
1132 This case can also occur in combination of the previous one, e.g.:
1140 D.2064_4 = a.0_3 + 4;
1143 3) The passed value is an address of an object within another one (which
1144 also passed by reference). Such situations are described by an ancestor
1145 jump function and describe situations such as:
1147 B::foo() (struct B * const this)
1151 D.1845_2 = &this_1(D)->D.1748;
1154 INFO is the structure describing individual parameters, accessed in different
1155 stages of IPA optimizations. PARMS_AINFO contains the information that is
1156 only needed for intraprocedural analysis.
NOTE(review): this excerpt is lossily sampled (braces, returns and some
guards are missing); code left byte-identical.  */
1159 compute_complex_assign_jump_func (struct func_body_info
*fbi
,
1160 struct ipa_node_params
*info
,
1161 struct ipa_jump_func
*jfunc
,
1162 gimple call
, gimple stmt
, tree name
,
1165 HOST_WIDE_INT offset
, size
, max_size
;
1166 tree op1
, tc_ssa
, base
, ssa
;
1169 op1
= gimple_assign_rhs1 (stmt
);
1171 if (TREE_CODE (op1
) == SSA_NAME
)
/* RHS is an SSA name: either directly a default-def parameter, or defined
by a statement that loads an unmodified parameter.  */
1173 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1174 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1176 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1177 SSA_NAME_DEF_STMT (op1
));
1182 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1183 tc_ssa
= gimple_assign_lhs (stmt
);
/* Case 2: arithmetic pass-through with an IP-invariant second operand.  */
1188 tree op2
= gimple_assign_rhs2 (stmt
);
1192 if (!is_gimple_ip_invariant (op2
)
1193 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1194 && !useless_type_conversion_p (TREE_TYPE (name
),
1198 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1199 gimple_assign_rhs_code (stmt
));
/* Case 1: simple pass-through.  */
1201 else if (gimple_assign_single_p (stmt
))
1203 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, tc_ssa
);
1204 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
/* Case 3: address of a sub-object — build an ancestor jump function.  */
1209 if (TREE_CODE (op1
) != ADDR_EXPR
)
1211 op1
= TREE_OPERAND (op1
, 0);
1212 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1214 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1215 if (TREE_CODE (base
) != MEM_REF
1216 /* If this is a varying address, punt. */
1218 || max_size
!= size
)
1220 offset
+= mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
1221 ssa
= TREE_OPERAND (base
, 0);
1222 if (TREE_CODE (ssa
) != SSA_NAME
1223 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1227 /* Dynamic types are changed in constructors and destructors. */
1228 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1229 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1230 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1231 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
));
1234 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1237 iftmp.1_3 = &obj_2(D)->D.1762;
1239 The base of the MEM_REF must be a default definition SSA NAME of a
1240 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1241 whole MEM_REF expression is returned and the offset calculated from any
1242 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1243 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1246 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1248 HOST_WIDE_INT size
, max_size
;
1249 tree expr
, parm
, obj
;
1251 if (!gimple_assign_single_p (assign
))
1253 expr
= gimple_assign_rhs1 (assign
);
1255 if (TREE_CODE (expr
) != ADDR_EXPR
)
1257 expr
= TREE_OPERAND (expr
, 0);
1259 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1261 if (TREE_CODE (expr
) != MEM_REF
1262 /* If this is a varying address, punt. */
1267 parm
= TREE_OPERAND (expr
, 0);
1268 if (TREE_CODE (parm
) != SSA_NAME
1269 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1270 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1273 *offset
+= mem_ref_offset (expr
).to_short_addr () * BITS_PER_UNIT
;
1279 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1280 statement PHI, try to find out whether NAME is in fact a
1281 multiple-inheritance typecast from a descendant into an ancestor of a formal
1282 parameter and thus can be described by an ancestor jump function and if so,
1283 write the appropriate function into JFUNC.
1285 Essentially we want to match the following pattern:
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1296 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1297 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1301 compute_complex_ancestor_jump_func (struct func_body_info
*fbi
,
1302 struct ipa_node_params
*info
,
1303 struct ipa_jump_func
*jfunc
,
1304 gimple call
, gimple phi
)
1306 HOST_WIDE_INT offset
;
1307 gimple assign
, cond
;
1308 basic_block phi_bb
, assign_bb
, cond_bb
;
1309 tree tmp
, parm
, expr
, obj
;
1312 if (gimple_phi_num_args (phi
) != 2)
1315 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1316 tmp
= PHI_ARG_DEF (phi
, 0);
1317 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1318 tmp
= PHI_ARG_DEF (phi
, 1);
1321 if (TREE_CODE (tmp
) != SSA_NAME
1322 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1323 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1324 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1327 assign
= SSA_NAME_DEF_STMT (tmp
);
1328 assign_bb
= gimple_bb (assign
);
1329 if (!single_pred_p (assign_bb
))
1331 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1334 parm
= TREE_OPERAND (expr
, 0);
1335 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1339 cond_bb
= single_pred (assign_bb
);
1340 cond
= last_stmt (cond_bb
);
1342 || gimple_code (cond
) != GIMPLE_COND
1343 || gimple_cond_code (cond
) != NE_EXPR
1344 || gimple_cond_lhs (cond
) != parm
1345 || !integer_zerop (gimple_cond_rhs (cond
)))
1348 phi_bb
= gimple_bb (phi
);
1349 for (i
= 0; i
< 2; i
++)
1351 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1352 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1356 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1357 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
));
1360 /* Inspect the given TYPE and return true iff it has the same structure (the
1361 same number of fields of the same types) as a C++ member pointer. If
1362 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1363 corresponding fields there. */
1366 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1370 if (TREE_CODE (type
) != RECORD_TYPE
)
1373 fld
= TYPE_FIELDS (type
);
1374 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1375 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1376 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1382 fld
= DECL_CHAIN (fld
);
1383 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1384 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1389 if (DECL_CHAIN (fld
))
1395 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1396 return the rhs of its defining statement. Otherwise return RHS as it
1400 get_ssa_def_if_simple_copy (tree rhs
)
1402 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1404 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1406 if (gimple_assign_single_p (def_stmt
))
1407 rhs
= gimple_assign_rhs1 (def_stmt
);
1414 /* Simple linked list, describing known contents of an aggregate beforere
1417 struct ipa_known_agg_contents_list
1419 /* Offset and size of the described part of the aggregate. */
1420 HOST_WIDE_INT offset
, size
;
1421 /* Known constant value or NULL if the contents is known to be unknown. */
1423 /* Pointer to the next structure in the list. */
1424 struct ipa_known_agg_contents_list
*next
;
1427 /* Find the proper place in linked list of ipa_known_agg_contents_list
1428 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1429 unless there is a partial overlap, in which case return NULL, or such
1430 element is already there, in which case set *ALREADY_THERE to true. */
1432 static struct ipa_known_agg_contents_list
**
1433 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list
**list
,
1434 HOST_WIDE_INT lhs_offset
,
1435 HOST_WIDE_INT lhs_size
,
1436 bool *already_there
)
1438 struct ipa_known_agg_contents_list
**p
= list
;
1439 while (*p
&& (*p
)->offset
< lhs_offset
)
1441 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1446 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1448 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1449 /* We already know this value is subsequently overwritten with
1451 *already_there
= true;
1453 /* Otherwise this is a partial overlap which we cannot
1460 /* Build aggregate jump function from LIST, assuming there are exactly
1461 CONST_COUNT constant entries there and that th offset of the passed argument
1462 is ARG_OFFSET and store it into JFUNC. */
1465 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1466 int const_count
, HOST_WIDE_INT arg_offset
,
1467 struct ipa_jump_func
*jfunc
)
1469 vec_alloc (jfunc
->agg
.items
, const_count
);
1474 struct ipa_agg_jf_item item
;
1475 item
.offset
= list
->offset
- arg_offset
;
1476 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1477 item
.value
= unshare_expr_without_location (list
->constant
);
1478 jfunc
->agg
.items
->quick_push (item
);
1484 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1485 in ARG is filled in with constant values. ARG can either be an aggregate
1486 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1487 aggregate. JFUNC is the jump function into which the constants are
1488 subsequently stored. */
1491 determine_locally_known_aggregate_parts (gimple call
, tree arg
, tree arg_type
,
1492 struct ipa_jump_func
*jfunc
)
1494 struct ipa_known_agg_contents_list
*list
= NULL
;
1495 int item_count
= 0, const_count
= 0;
1496 HOST_WIDE_INT arg_offset
, arg_size
;
1497 gimple_stmt_iterator gsi
;
1499 bool check_ref
, by_ref
;
1502 /* The function operates in three stages. First, we prepare check_ref, r,
1503 arg_base and arg_offset based on what is actually passed as an actual
1506 if (POINTER_TYPE_P (arg_type
))
1509 if (TREE_CODE (arg
) == SSA_NAME
)
1512 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
))))
1517 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
1518 arg_size
= tree_to_uhwi (type_size
);
1519 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1521 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1523 HOST_WIDE_INT arg_max_size
;
1525 arg
= TREE_OPERAND (arg
, 0);
1526 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1528 if (arg_max_size
== -1
1529 || arg_max_size
!= arg_size
1532 if (DECL_P (arg_base
))
1535 ao_ref_init (&r
, arg_base
);
1545 HOST_WIDE_INT arg_max_size
;
1547 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1551 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1553 if (arg_max_size
== -1
1554 || arg_max_size
!= arg_size
1558 ao_ref_init (&r
, arg
);
1561 /* Second stage walks back the BB, looks at individual statements and as long
1562 as it is confident of how the statements affect contents of the
1563 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1565 gsi
= gsi_for_stmt (call
);
1567 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1569 struct ipa_known_agg_contents_list
*n
, **p
;
1570 gimple stmt
= gsi_stmt (gsi
);
1571 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1572 tree lhs
, rhs
, lhs_base
;
1574 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1576 if (!gimple_assign_single_p (stmt
))
1579 lhs
= gimple_assign_lhs (stmt
);
1580 rhs
= gimple_assign_rhs1 (stmt
);
1581 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1582 || TREE_CODE (lhs
) == BIT_FIELD_REF
1583 || contains_bitfld_component_ref_p (lhs
))
1586 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
1588 if (lhs_max_size
== -1
1589 || lhs_max_size
!= lhs_size
)
1594 if (TREE_CODE (lhs_base
) != MEM_REF
1595 || TREE_OPERAND (lhs_base
, 0) != arg_base
1596 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1599 else if (lhs_base
!= arg_base
)
1601 if (DECL_P (lhs_base
))
1607 bool already_there
= false;
1608 p
= get_place_in_agg_contents_list (&list
, lhs_offset
, lhs_size
,
1615 rhs
= get_ssa_def_if_simple_copy (rhs
);
1616 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1618 n
->offset
= lhs_offset
;
1619 if (is_gimple_ip_invariant (rhs
))
1625 n
->constant
= NULL_TREE
;
1630 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1631 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1635 /* Third stage just goes over the list and creates an appropriate vector of
1636 ipa_agg_jf_item structures out of it, of sourse only if there are
1637 any known constants to begin with. */
1641 jfunc
->agg
.by_ref
= by_ref
;
1642 build_agg_jump_func_from_list (list
, const_count
, arg_offset
, jfunc
);
1647 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1650 tree type
= (e
->callee
1651 ? TREE_TYPE (e
->callee
->decl
)
1652 : gimple_call_fntype (e
->call_stmt
));
1653 tree t
= TYPE_ARG_TYPES (type
);
1655 for (n
= 0; n
< i
; n
++)
1662 return TREE_VALUE (t
);
1665 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1666 for (n
= 0; n
< i
; n
++)
1673 return TREE_TYPE (t
);
1677 /* Compute jump function for all arguments of callsite CS and insert the
1678 information in the jump_functions array in the ipa_edge_args corresponding
1679 to this callsite. */
1682 ipa_compute_jump_functions_for_edge (struct func_body_info
*fbi
,
1683 struct cgraph_edge
*cs
)
1685 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1686 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1687 gimple call
= cs
->call_stmt
;
1688 int n
, arg_num
= gimple_call_num_args (call
);
1689 bool useful_context
= false;
1691 if (arg_num
== 0 || args
->jump_functions
)
1693 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1694 if (flag_devirtualize
)
1695 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
);
1697 if (gimple_call_internal_p (call
))
1699 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1702 for (n
= 0; n
< arg_num
; n
++)
1704 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1705 tree arg
= gimple_call_arg (call
, n
);
1706 tree param_type
= ipa_get_callee_param_type (cs
, n
);
1707 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
1710 struct ipa_polymorphic_call_context
context (cs
->caller
->decl
,
1713 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
);
1714 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
1715 if (!context
.useless_p ())
1716 useful_context
= true;
1719 if (is_gimple_ip_invariant (arg
))
1720 ipa_set_jf_constant (jfunc
, arg
, cs
);
1721 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1722 && TREE_CODE (arg
) == PARM_DECL
)
1724 int index
= ipa_get_param_decl_index (info
, arg
);
1726 gcc_assert (index
>=0);
1727 /* Aggregate passed by value, check for pass-through, otherwise we
1728 will attempt to fill in aggregate contents later in this
1730 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
1732 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
1736 else if (TREE_CODE (arg
) == SSA_NAME
)
1738 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1740 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1744 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
1745 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1750 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1751 if (is_gimple_assign (stmt
))
1752 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
1753 call
, stmt
, arg
, param_type
);
1754 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1755 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
1760 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1761 passed (because type conversions are ignored in gimple). Usually we can
1762 safely get type from function declaration, but in case of K&R prototypes or
1763 variadic functions we can try our luck with type of the pointer passed.
1764 TODO: Since we look for actual initialization of the memory object, we may better
1765 work out the type based on the memory stores we find. */
1767 param_type
= TREE_TYPE (arg
);
1769 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1770 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1771 && (jfunc
->type
!= IPA_JF_ANCESTOR
1772 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1773 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1774 || POINTER_TYPE_P (param_type
)))
1775 determine_locally_known_aggregate_parts (call
, arg
, param_type
, jfunc
);
1777 if (!useful_context
)
1778 vec_free (args
->polymorphic_call_contexts
);
1781 /* Compute jump functions for all edges - both direct and indirect - outgoing
1785 ipa_compute_jump_functions_for_bb (struct func_body_info
*fbi
, basic_block bb
)
1787 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
1789 struct cgraph_edge
*cs
;
1791 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
1793 struct cgraph_node
*callee
= cs
->callee
;
1797 callee
->ultimate_alias_target ();
1798 /* We do not need to bother analyzing calls to unknown functions
1799 unless they may become known during lto/whopr. */
1800 if (!callee
->definition
&& !flag_lto
)
1803 ipa_compute_jump_functions_for_edge (fbi
, cs
);
1807 /* If STMT looks like a statement loading a value from a member pointer formal
1808 parameter, return that parameter and store the offset of the field to
1809 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1810 might be clobbered). If USE_DELTA, then we look for a use of the delta
1811 field rather than the pfn. */
1814 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1815 HOST_WIDE_INT
*offset_p
)
1817 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1819 if (!gimple_assign_single_p (stmt
))
1822 rhs
= gimple_assign_rhs1 (stmt
);
1823 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1825 ref_field
= TREE_OPERAND (rhs
, 1);
1826 rhs
= TREE_OPERAND (rhs
, 0);
1829 ref_field
= NULL_TREE
;
1830 if (TREE_CODE (rhs
) != MEM_REF
)
1832 rec
= TREE_OPERAND (rhs
, 0);
1833 if (TREE_CODE (rec
) != ADDR_EXPR
)
1835 rec
= TREE_OPERAND (rec
, 0);
1836 if (TREE_CODE (rec
) != PARM_DECL
1837 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1839 ref_offset
= TREE_OPERAND (rhs
, 1);
1846 *offset_p
= int_bit_position (fld
);
1850 if (integer_nonzerop (ref_offset
))
1852 return ref_field
== fld
? rec
: NULL_TREE
;
1855 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1859 /* Returns true iff T is an SSA_NAME defined by a statement. */
1862 ipa_is_ssa_with_stmt_def (tree t
)
1864 if (TREE_CODE (t
) == SSA_NAME
1865 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1871 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1872 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1873 indirect call graph edge. */
1875 static struct cgraph_edge
*
1876 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1878 struct cgraph_edge
*cs
;
1880 cs
= node
->get_edge (stmt
);
1881 cs
->indirect_info
->param_index
= param_index
;
1882 cs
->indirect_info
->agg_contents
= 0;
1883 cs
->indirect_info
->member_ptr
= 0;
1887 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1888 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1889 intermediate information about each formal parameter. Currently it checks
1890 whether the call calls a pointer that is a formal parameter and if so, the
1891 parameter is marked with the called flag and an indirect call graph edge
1892 describing the call is created. This is very simple for ordinary pointers
1893 represented in SSA but not-so-nice when it comes to member pointers. The
1894 ugly part of this function does nothing more than trying to match the
1895 pattern of such a call. An example of such a pattern is the gimple dump
1896 below, the call is on the last line:
1899 f$__delta_5 = f.__delta;
1900 f$__pfn_24 = f.__pfn;
1904 f$__delta_5 = MEM[(struct *)&f];
1905 f$__pfn_24 = MEM[(struct *)&f + 4B];
1907 and a few lines below:
1910 D.2496_3 = (int) f$__pfn_24;
1911 D.2497_4 = D.2496_3 & 1;
1918 D.2500_7 = (unsigned int) f$__delta_5;
1919 D.2501_8 = &S + D.2500_7;
1920 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1921 D.2503_10 = *D.2502_9;
1922 D.2504_12 = f$__pfn_24 + -1;
1923 D.2505_13 = (unsigned int) D.2504_12;
1924 D.2506_14 = D.2503_10 + D.2505_13;
1925 D.2507_15 = *D.2506_14;
1926 iftmp.11_16 = (String:: *) D.2507_15;
1929 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1930 D.2500_19 = (unsigned int) f$__delta_5;
1931 D.2508_20 = &S + D.2500_19;
1932 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1934 Such patterns are results of simple calls to a member pointer:
1936 int doprinting (int (MyString::* f)(int) const)
1938 MyString S ("somestring");
1943 Moreover, the function also looks for called pointers loaded from aggregates
1944 passed by value or reference. */
1947 ipa_analyze_indirect_call_uses (struct func_body_info
*fbi
, gimple call
,
1950 struct ipa_node_params
*info
= fbi
->info
;
1951 HOST_WIDE_INT offset
;
1954 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1956 tree var
= SSA_NAME_VAR (target
);
1957 int index
= ipa_get_param_decl_index (info
, var
);
1959 ipa_note_param_call (fbi
->node
, index
, call
);
1964 gimple def
= SSA_NAME_DEF_STMT (target
);
1965 if (gimple_assign_single_p (def
)
1966 && ipa_load_from_parm_agg_1 (fbi
, info
->descriptors
, def
,
1967 gimple_assign_rhs1 (def
), &index
, &offset
,
1970 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
1971 cs
->indirect_info
->offset
= offset
;
1972 cs
->indirect_info
->agg_contents
= 1;
1973 cs
->indirect_info
->by_ref
= by_ref
;
1977 /* Now we need to try to match the complex pattern of calling a member
1979 if (gimple_code (def
) != GIMPLE_PHI
1980 || gimple_phi_num_args (def
) != 2
1981 || !POINTER_TYPE_P (TREE_TYPE (target
))
1982 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1985 /* First, we need to check whether one of these is a load from a member
1986 pointer that is a parameter to this function. */
1987 tree n1
= PHI_ARG_DEF (def
, 0);
1988 tree n2
= PHI_ARG_DEF (def
, 1);
1989 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1991 gimple d1
= SSA_NAME_DEF_STMT (n1
);
1992 gimple d2
= SSA_NAME_DEF_STMT (n2
);
1995 basic_block bb
, virt_bb
;
1996 basic_block join
= gimple_bb (def
);
1997 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1999 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2002 bb
= EDGE_PRED (join
, 0)->src
;
2003 virt_bb
= gimple_bb (d2
);
2005 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2007 bb
= EDGE_PRED (join
, 1)->src
;
2008 virt_bb
= gimple_bb (d1
);
2013 /* Second, we need to check that the basic blocks are laid out in the way
2014 corresponding to the pattern. */
2016 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2017 || single_pred (virt_bb
) != bb
2018 || single_succ (virt_bb
) != join
)
2021 /* Third, let's see that the branching is done depending on the least
2022 significant bit of the pfn. */
2024 gimple branch
= last_stmt (bb
);
2025 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2028 if ((gimple_cond_code (branch
) != NE_EXPR
2029 && gimple_cond_code (branch
) != EQ_EXPR
)
2030 || !integer_zerop (gimple_cond_rhs (branch
)))
2033 tree cond
= gimple_cond_lhs (branch
);
2034 if (!ipa_is_ssa_with_stmt_def (cond
))
2037 def
= SSA_NAME_DEF_STMT (cond
);
2038 if (!is_gimple_assign (def
)
2039 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2040 || !integer_onep (gimple_assign_rhs2 (def
)))
2043 cond
= gimple_assign_rhs1 (def
);
2044 if (!ipa_is_ssa_with_stmt_def (cond
))
2047 def
= SSA_NAME_DEF_STMT (cond
);
2049 if (is_gimple_assign (def
)
2050 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2052 cond
= gimple_assign_rhs1 (def
);
2053 if (!ipa_is_ssa_with_stmt_def (cond
))
2055 def
= SSA_NAME_DEF_STMT (cond
);
2059 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2060 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2061 == ptrmemfunc_vbit_in_delta
),
2066 index
= ipa_get_param_decl_index (info
, rec
);
2068 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2070 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2071 cs
->indirect_info
->offset
= offset
;
2072 cs
->indirect_info
->agg_contents
= 1;
2073 cs
->indirect_info
->member_ptr
= 1;
2079 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2080 object referenced in the expression is a formal parameter of the caller
2081 FBI->node (described by FBI->info), create a call note for the
2085 ipa_analyze_virtual_call_uses (struct func_body_info
*fbi
,
2086 gimple call
, tree target
)
2088 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2090 HOST_WIDE_INT anc_offset
;
2092 if (!flag_devirtualize
)
2095 if (TREE_CODE (obj
) != SSA_NAME
)
2098 struct ipa_node_params
*info
= fbi
->info
;
2099 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2101 struct ipa_jump_func jfunc
;
2102 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2106 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2107 gcc_assert (index
>= 0);
2108 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
2114 struct ipa_jump_func jfunc
;
2115 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
2118 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2121 index
= ipa_get_param_decl_index (info
,
2122 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2123 gcc_assert (index
>= 0);
2124 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2125 call
, &jfunc
, anc_offset
))
2129 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2130 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2131 ii
->offset
= anc_offset
;
2132 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2133 ii
->otr_type
= obj_type_ref_class (target
);
2134 ii
->polymorphic
= 1;
2137 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2138 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2139 containing intermediate information about each formal parameter. */
2142 ipa_analyze_call_uses (struct func_body_info
*fbi
, gimple call
)
2144 tree target
= gimple_call_fn (call
);
2147 || (TREE_CODE (target
) != SSA_NAME
2148 && !virtual_method_call_p (target
)))
2151 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2152 /* If we previously turned the call into a direct call, there is
2153 no need to analyze. */
2154 if (cs
&& !cs
->indirect_unknown_callee
)
2157 if (cs
->indirect_info
->polymorphic
)
2160 tree target
= gimple_call_fn (call
);
2161 ipa_polymorphic_call_context
context (current_function_decl
,
2162 target
, call
, &instance
);
2164 gcc_checking_assert (cs
->indirect_info
->otr_type
2165 == obj_type_ref_class (target
));
2166 gcc_checking_assert (cs
->indirect_info
->otr_token
2167 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2169 cs
->indirect_info
->vptr_changed
2170 = !context
.get_dynamic_type (instance
,
2171 OBJ_TYPE_REF_OBJECT (target
),
2172 obj_type_ref_class (target
), call
);
2173 cs
->indirect_info
->context
= context
;
2176 if (TREE_CODE (target
) == SSA_NAME
)
2177 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2178 else if (virtual_method_call_p (target
))
2179 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2183 /* Analyze the call statement STMT with respect to formal parameters (described
2184 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2185 formal parameters are called. */
2188 ipa_analyze_stmt_uses (struct func_body_info
*fbi
, gimple stmt
)
2190 if (is_gimple_call (stmt
))
2191 ipa_analyze_call_uses (fbi
, stmt
);
2194 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2195 If OP is a parameter declaration, mark it as used in the info structure
2199 visit_ref_for_mod_analysis (gimple
, tree op
, tree
, void *data
)
2201 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
2203 op
= get_base_address (op
);
2205 && TREE_CODE (op
) == PARM_DECL
)
2207 int index
= ipa_get_param_decl_index (info
, op
);
2208 gcc_assert (index
>= 0);
2209 ipa_set_param_used (info
, index
, true);
2215 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2216 the findings in various structures of the associated ipa_node_params
2217 structure, such as parameter flags, notes etc. FBI holds various data about
2218 the function being analyzed. */
2221 ipa_analyze_params_uses_in_bb (struct func_body_info
*fbi
, basic_block bb
)
2223 gimple_stmt_iterator gsi
;
2224 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2226 gimple stmt
= gsi_stmt (gsi
);
2228 if (is_gimple_debug (stmt
))
2231 ipa_analyze_stmt_uses (fbi
, stmt
);
2232 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2233 visit_ref_for_mod_analysis
,
2234 visit_ref_for_mod_analysis
,
2235 visit_ref_for_mod_analysis
);
2237 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2238 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2239 visit_ref_for_mod_analysis
,
2240 visit_ref_for_mod_analysis
,
2241 visit_ref_for_mod_analysis
);
2244 /* Calculate controlled uses of parameters of NODE. */
2247 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
2249 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2251 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2253 tree parm
= ipa_get_param (info
, i
);
2254 int controlled_uses
= 0;
2256 /* For SSA regs see if parameter is used. For non-SSA we compute
2257 the flag during modification analysis. */
2258 if (is_gimple_reg (parm
))
2260 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2262 if (ddef
&& !has_zero_uses (ddef
))
2264 imm_use_iterator imm_iter
;
2265 use_operand_p use_p
;
2267 ipa_set_param_used (info
, i
, true);
2268 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2269 if (!is_gimple_call (USE_STMT (use_p
)))
2271 if (!is_gimple_debug (USE_STMT (use_p
)))
2273 controlled_uses
= IPA_UNDESCRIBED_USE
;
2281 controlled_uses
= 0;
2284 controlled_uses
= IPA_UNDESCRIBED_USE
;
2285 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2289 /* Free stuff in BI. */
2292 free_ipa_bb_info (struct ipa_bb_info
*bi
)
2294 bi
->cg_edges
.release ();
2295 bi
->param_aa_statuses
.release ();
2298 /* Dominator walker driving the analysis. */
2300 class analysis_dom_walker
: public dom_walker
2303 analysis_dom_walker (struct func_body_info
*fbi
)
2304 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
2306 virtual void before_dom_children (basic_block
);
2309 struct func_body_info
*m_fbi
;
2313 analysis_dom_walker::before_dom_children (basic_block bb
)
2315 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
2316 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
2319 /* Initialize the array describing properties of of formal parameters
2320 of NODE, analyze their uses and compute jump functions associated
2321 with actual arguments of calls from within NODE. */
2324 ipa_analyze_node (struct cgraph_node
*node
)
2326 struct func_body_info fbi
;
2327 struct ipa_node_params
*info
;
2329 ipa_check_create_node_params ();
2330 ipa_check_create_edge_args ();
2331 info
= IPA_NODE_REF (node
);
2333 if (info
->analysis_done
)
2335 info
->analysis_done
= 1;
2337 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2339 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2341 ipa_set_param_used (info
, i
, true);
2342 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2347 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
2349 calculate_dominance_info (CDI_DOMINATORS
);
2350 ipa_initialize_node_params (node
);
2351 ipa_analyze_controlled_uses (node
);
2354 fbi
.info
= IPA_NODE_REF (node
);
2355 fbi
.bb_infos
= vNULL
;
2356 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
2357 fbi
.param_count
= ipa_get_param_count (info
);
2360 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
2362 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2363 bi
->cg_edges
.safe_push (cs
);
2366 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
2368 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2369 bi
->cg_edges
.safe_push (cs
);
2372 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2375 struct ipa_bb_info
*bi
;
2376 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
2377 free_ipa_bb_info (bi
);
2378 fbi
.bb_infos
.release ();
2379 free_dominance_info (CDI_DOMINATORS
);
2383 /* Update the jump functions associated with call graph edge E when the call
2384 graph edge CS is being inlined, assuming that E->caller is already (possibly
2385 indirectly) inlined into CS->callee and that E has not been inlined. */
2388 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2389 struct cgraph_edge
*e
)
2391 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2392 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2393 int count
= ipa_get_cs_argument_count (args
);
2396 for (i
= 0; i
< count
; i
++)
2398 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2399 struct ipa_polymorphic_call_context
*dst_ctx
2400 = ipa_get_ith_polymorhic_call_context (args
, i
);
2402 if (dst
->type
== IPA_JF_ANCESTOR
)
2404 struct ipa_jump_func
*src
;
2405 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2406 struct ipa_polymorphic_call_context
*src_ctx
2407 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2409 /* Variable number of arguments can cause havoc if we try to access
2410 one that does not exist in the inlined edge. So make sure we
2411 do not. */
2412 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2414 dst
->type
= IPA_JF_UNKNOWN
;
2418 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2420 if (src_ctx
&& !src_ctx
->useless_p ())
2422 struct ipa_polymorphic_call_context ctx
= *src_ctx
;
2424 /* TODO: Make type preserved safe WRT contexts. */
2425 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2426 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2427 ctx
.offset_by (dst
->value
.ancestor
.offset
);
2428 if (!ctx
.useless_p ())
2430 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2432 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2434 dst_ctx
->combine_with (ctx
);
2438 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2440 struct ipa_agg_jf_item
*item
;
2443 /* Currently we do not produce clobber aggregate jump functions,
2444 replace with merging when we do. */
2445 gcc_assert (!dst
->agg
.items
);
2447 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2448 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2449 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2450 item
->offset
-= dst
->value
.ancestor
.offset
;
2453 if (src
->type
== IPA_JF_PASS_THROUGH
2454 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2456 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2457 dst
->value
.ancestor
.agg_preserved
&=
2458 src
->value
.pass_through
.agg_preserved
;
2460 else if (src
->type
== IPA_JF_ANCESTOR
)
2462 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2463 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2464 dst
->value
.ancestor
.agg_preserved
&=
2465 src
->value
.ancestor
.agg_preserved
;
2468 dst
->type
= IPA_JF_UNKNOWN
;
2470 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2472 struct ipa_jump_func
*src
;
2473 /* We must check range due to calls with variable number of arguments
2474 and we cannot combine jump functions with operations. */
2475 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2476 && (dst
->value
.pass_through
.formal_id
2477 < ipa_get_cs_argument_count (top
)))
2479 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2480 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2481 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2482 struct ipa_polymorphic_call_context
*src_ctx
2483 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2485 if (src_ctx
&& !src_ctx
->useless_p ())
2487 struct ipa_polymorphic_call_context ctx
= *src_ctx
;
2489 /* TODO: Make type preserved safe WRT contexts. */
2490 if (!ipa_get_jf_pass_through_type_preserved (dst
))
2491 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2492 if (!ctx
.useless_p ())
2496 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2498 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2500 dst_ctx
->combine_with (ctx
);
2505 case IPA_JF_UNKNOWN
:
2506 dst
->type
= IPA_JF_UNKNOWN
;
2509 ipa_set_jf_cst_copy (dst
, src
);
2512 case IPA_JF_PASS_THROUGH
:
2514 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2515 enum tree_code operation
;
2516 operation
= ipa_get_jf_pass_through_operation (src
);
2518 if (operation
== NOP_EXPR
)
2522 && ipa_get_jf_pass_through_agg_preserved (src
);
2523 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
2527 tree operand
= ipa_get_jf_pass_through_operand (src
);
2528 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2533 case IPA_JF_ANCESTOR
:
2537 && ipa_get_jf_ancestor_agg_preserved (src
);
2538 ipa_set_ancestor_jf (dst
,
2539 ipa_get_jf_ancestor_offset (src
),
2540 ipa_get_jf_ancestor_formal_id (src
),
2549 && (dst_agg_p
|| !src
->agg
.by_ref
))
2551 /* Currently we do not produce clobber aggregate jump
2552 functions, replace with merging when we do. */
2553 gcc_assert (!dst
->agg
.items
);
2555 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2556 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2560 dst
->type
= IPA_JF_UNKNOWN
;
2565 /* If TARGET is an addr_expr of a function declaration, make it the
2566 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2567 Otherwise, return NULL. */
2569 struct cgraph_edge
*
2570 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
2573 struct cgraph_node
*callee
;
2574 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2575 bool unreachable
= false;
2577 if (TREE_CODE (target
) == ADDR_EXPR
)
2578 target
= TREE_OPERAND (target
, 0);
2579 if (TREE_CODE (target
) != FUNCTION_DECL
)
2581 target
= canonicalize_constructor_val (target
, NULL
);
2582 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2584 if (ie
->indirect_info
->member_ptr
)
2585 /* Member pointer call that goes through a VMT lookup. */
2588 if (dump_enabled_p ())
2590 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2591 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2592 "discovered direct call to non-function in %s/%i, "
2593 "making it __builtin_unreachable\n",
2594 ie
->caller
->name (), ie
->caller
->order
);
2597 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2598 callee
= cgraph_node::get_create (target
);
2602 callee
= cgraph_node::get (target
);
2605 callee
= cgraph_node::get (target
);
2607 /* Because may-edges are not explicitly represented and vtable may be external,
2608 we may create the first reference to the object in the unit. */
2609 if (!callee
|| callee
->global
.inlined_to
)
2612 /* We are better to ensure we can refer to it.
2613 In the case of static functions we are out of luck, since we already
2614 removed its body. In the case of public functions we may or may
2615 not introduce the reference. */
2616 if (!canonicalize_constructor_val (target
, NULL
)
2617 || !TREE_PUBLIC (target
))
2620 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2621 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2622 xstrdup (ie
->caller
->name ()),
2624 xstrdup (ie
->callee
->name ()),
2628 callee
= cgraph_node::get_create (target
);
2631 /* If the edge is already speculated. */
2632 if (speculative
&& ie
->speculative
)
2634 struct cgraph_edge
*e2
;
2635 struct ipa_ref
*ref
;
2636 ie
->speculative_call_info (e2
, ie
, ref
);
2637 if (e2
->callee
->ultimate_alias_target ()
2638 != callee
->ultimate_alias_target ())
2641 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2642 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2643 xstrdup (ie
->caller
->name ()),
2645 xstrdup (callee
->name ()),
2647 xstrdup (e2
->callee
->name ()),
2653 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2654 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2655 xstrdup (ie
->caller
->name ()),
2657 xstrdup (callee
->name ()),
2663 if (!dbg_cnt (devirt
))
2666 ipa_check_create_node_params ();
2668 /* We can not make edges to inline clones. It is bug that someone removed
2669 the cgraph node too early. */
2670 gcc_assert (!callee
->global
.inlined_to
);
2672 if (dump_file
&& !unreachable
)
2674 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
2675 "(%s/%i -> %s/%i), for stmt ",
2676 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2677 speculative
? "speculative" : "known",
2678 xstrdup (ie
->caller
->name ()),
2680 xstrdup (callee
->name ()),
2683 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2685 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2687 if (dump_enabled_p ())
2689 location_t loc
= gimple_location_safe (ie
->call_stmt
);
2691 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2692 "converting indirect call in %s to direct call to %s\n",
2693 ie
->caller
->name (), callee
->name ());
2696 ie
= ie
->make_direct (callee
);
2699 if (!callee
->can_be_discarded_p ())
2702 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
2706 ie
= ie
->make_speculative
2707 (callee
, ie
->count
* 8 / 10, ie
->frequency
* 8 / 10);
2709 es
= inline_edge_summary (ie
);
2710 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2711 - eni_size_weights
.call_cost
);
2712 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2713 - eni_time_weights
.call_cost
);
2718 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2719 return NULL if there is not any. BY_REF specifies whether the value has to
2720 be passed by reference or by value. */
2723 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2724 HOST_WIDE_INT offset
, bool by_ref
)
2726 struct ipa_agg_jf_item
*item
;
2729 if (by_ref
!= agg
->by_ref
)
2732 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2733 if (item
->offset
== offset
)
2735 /* Currently we do not have clobber values, return NULL for them once
2736 we do. */
2737 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2743 /* Remove a reference to SYMBOL from the list of references of a node given by
2744 reference description RDESC. Return true if the reference has been
2745 successfully found and removed. */
2748 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
2750 struct ipa_ref
*to_del
;
2751 struct cgraph_edge
*origin
;
2756 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
2757 origin
->lto_stmt_uid
);
2761 to_del
->remove_reference ();
2763 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2764 xstrdup (origin
->caller
->name ()),
2765 origin
->caller
->order
, xstrdup (symbol
->name ()));
2769 /* If JFUNC has a reference description with refcount different from
2770 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2771 NULL. JFUNC must be a constant jump function. */
2773 static struct ipa_cst_ref_desc
*
2774 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2776 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2777 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2783 /* If the value of constant jump function JFUNC is an address of a function
2784 declaration, return the associated call graph node. Otherwise return
2787 static cgraph_node
*
2788 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
2790 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
2791 tree cst
= ipa_get_jf_constant (jfunc
);
2792 if (TREE_CODE (cst
) != ADDR_EXPR
2793 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
2796 return cgraph_node::get (TREE_OPERAND (cst
, 0));
2800 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2801 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2802 the edge specified in the rdesc. Return false if either the symbol or the
2803 reference could not be found, otherwise return true. */
2806 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
2808 struct ipa_cst_ref_desc
*rdesc
;
2809 if (jfunc
->type
== IPA_JF_CONST
2810 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2811 && --rdesc
->refcount
== 0)
2813 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
2817 return remove_described_reference (symbol
, rdesc
);
2822 /* Try to find a destination for indirect edge IE that corresponds to a simple
2823 call or a call of a member function pointer and where the destination is a
2824 pointer formal parameter described by jump function JFUNC. If it can be
2825 determined, return the newly direct edge, otherwise return NULL.
2826 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2828 static struct cgraph_edge
*
2829 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2830 struct ipa_jump_func
*jfunc
,
2831 struct ipa_node_params
*new_root_info
)
2833 struct cgraph_edge
*cs
;
2835 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2837 if (ie
->indirect_info
->agg_contents
)
2838 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2839 ie
->indirect_info
->offset
,
2840 ie
->indirect_info
->by_ref
);
2842 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2845 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2847 if (cs
&& !agg_contents
)
2850 gcc_checking_assert (cs
->callee
2852 || jfunc
->type
!= IPA_JF_CONST
2853 || !cgraph_node_for_jfunc (jfunc
)
2854 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2855 ok
= try_decrement_rdesc_refcount (jfunc
);
2856 gcc_checking_assert (ok
);
2862 /* Return the target to be used in cases of impossible devirtualization. IE
2863 and target (the latter can be NULL) are dumped when dumping is enabled. */
2866 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
2872 "Type inconsistent devirtualization: %s/%i->%s\n",
2873 ie
->caller
->name (), ie
->caller
->order
,
2874 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
2877 "No devirtualization target in %s/%i\n",
2878 ie
->caller
->name (), ie
->caller
->order
);
2880 tree new_target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2881 cgraph_node::get_create (new_target
);
2885 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2886 call based on a formal parameter which is described by jump function JFUNC
2887 and if it can be determined, make it direct and return the direct edge.
2888 Otherwise, return NULL. CTX describes the polymorphic context that the
2889 parameter the call is based on brings along with it. */
2891 static struct cgraph_edge
*
2892 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2893 struct ipa_jump_func
*jfunc
,
2894 struct ipa_polymorphic_call_context ctx
)
2897 bool speculative
= false;
2899 if (!flag_devirtualize
)
2902 gcc_assert (!ie
->indirect_info
->by_ref
);
2904 /* Try to do lookup via known virtual table pointer value. */
2905 if (!ie
->indirect_info
->vptr_changed
|| flag_devirtualize_speculatively
)
2908 unsigned HOST_WIDE_INT offset
;
2909 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2910 ie
->indirect_info
->offset
,
2912 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2914 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
2918 if ((TREE_CODE (TREE_TYPE (t
)) == FUNCTION_TYPE
2919 && DECL_FUNCTION_CODE (t
) == BUILT_IN_UNREACHABLE
)
2920 || !possible_polymorphic_call_target_p
2921 (ie
, cgraph_node::get (t
)))
2923 /* Do not speculate builtin_unreachable, it is stupid! */
2924 if (!ie
->indirect_info
->vptr_changed
)
2925 target
= ipa_impossible_devirt_target (ie
, target
);
2930 speculative
= ie
->indirect_info
->vptr_changed
;
2936 ipa_polymorphic_call_context
ie_context (ie
);
2937 vec
<cgraph_node
*>targets
;
2940 ctx
.offset_by (ie
->indirect_info
->offset
);
2941 if (ie
->indirect_info
->vptr_changed
)
2942 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
2943 ie
->indirect_info
->otr_type
);
2944 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
2945 targets
= possible_polymorphic_call_targets
2946 (ie
->indirect_info
->otr_type
,
2947 ie
->indirect_info
->otr_token
,
2949 if (final
&& targets
.length () <= 1)
2951 if (targets
.length () == 1)
2952 target
= targets
[0]->decl
;
2954 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
2956 else if (!target
&& flag_devirtualize_speculatively
2957 && !ie
->speculative
&& ie
->maybe_hot_p ())
2960 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
2961 ie
->indirect_info
->otr_token
,
2962 ie
->indirect_info
->context
);
2972 if (!possible_polymorphic_call_target_p
2973 (ie
, cgraph_node::get_create (target
)))
2977 target
= ipa_impossible_devirt_target (ie
, target
);
2979 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
2985 /* Update the param called notes associated with NODE when CS is being inlined,
2986 assuming NODE is (potentially indirectly) inlined into CS->callee.
2987 Moreover, if the callee is discovered to be constant, create a new cgraph
2988 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2989 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2992 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2993 struct cgraph_node
*node
,
2994 vec
<cgraph_edge
*> *new_edges
)
2996 struct ipa_edge_args
*top
;
2997 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2998 struct ipa_node_params
*new_root_info
;
3001 ipa_check_create_edge_args ();
3002 top
= IPA_EDGE_REF (cs
);
3003 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
3004 ? cs
->caller
->global
.inlined_to
3007 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3009 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3010 struct ipa_jump_func
*jfunc
;
3013 next_ie
= ie
->next_callee
;
3015 if (ici
->param_index
== -1)
3018 /* We must check range due to calls with variable number of arguments: */
3019 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
3021 ici
->param_index
= -1;
3025 param_index
= ici
->param_index
;
3026 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3028 if (!flag_indirect_inlining
)
3029 new_direct_edge
= NULL
;
3030 else if (ici
->polymorphic
)
3032 ipa_polymorphic_call_context ctx
;
3033 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3034 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
);
3037 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3039 /* If speculation was removed, then we need to do nothing. */
3040 if (new_direct_edge
&& new_direct_edge
!= ie
)
3042 new_direct_edge
->indirect_inlining_edge
= 1;
3043 top
= IPA_EDGE_REF (cs
);
3046 else if (new_direct_edge
)
3048 new_direct_edge
->indirect_inlining_edge
= 1;
3049 if (new_direct_edge
->call_stmt
)
3050 new_direct_edge
->call_stmt_cannot_inline_p
3051 = !gimple_check_call_matching_types (
3052 new_direct_edge
->call_stmt
,
3053 new_direct_edge
->callee
->decl
, false);
3056 new_edges
->safe_push (new_direct_edge
);
3059 top
= IPA_EDGE_REF (cs
);
3061 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
3062 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3064 if ((ici
->agg_contents
3065 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
3066 || (ici
->polymorphic
3067 && !ipa_get_jf_pass_through_type_preserved (jfunc
)))
3068 ici
->param_index
= -1;
3070 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3072 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3074 if ((ici
->agg_contents
3075 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
3076 || (ici
->polymorphic
3077 && !ipa_get_jf_ancestor_type_preserved (jfunc
)))
3078 ici
->param_index
= -1;
3081 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3082 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3086 /* Either we can find a destination for this edge now or never. */
3087 ici
->param_index
= -1;
3093 /* Recursively traverse subtree of NODE (including node) made of inlined
3094 cgraph_edges when CS has been inlined and invoke
3095 update_indirect_edges_after_inlining on all nodes and
3096 update_jump_functions_after_inlining on all non-inlined edges that lead out
3097 of this subtree. Newly discovered indirect edges will be added to
3098 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3102 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
3103 struct cgraph_node
*node
,
3104 vec
<cgraph_edge
*> *new_edges
)
3106 struct cgraph_edge
*e
;
3109 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
3111 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3112 if (!e
->inline_failed
)
3113 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
3115 update_jump_functions_after_inlining (cs
, e
);
3116 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3117 update_jump_functions_after_inlining (cs
, e
);
3122 /* Combine two controlled uses counts as done during inlining. */
3125 combine_controlled_uses_counters (int c
, int d
)
3127 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
3128 return IPA_UNDESCRIBED_USE
;
3133 /* Propagate number of controlled users from CS->caleee to the new root of the
3134 tree of inlined nodes. */
3137 propagate_controlled_uses (struct cgraph_edge
*cs
)
3139 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3140 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
3141 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
3142 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
3143 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
3146 count
= MIN (ipa_get_cs_argument_count (args
),
3147 ipa_get_param_count (old_root_info
));
3148 for (i
= 0; i
< count
; i
++)
3150 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3151 struct ipa_cst_ref_desc
*rdesc
;
3153 if (jf
->type
== IPA_JF_PASS_THROUGH
)
3156 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
3157 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
3158 d
= ipa_get_controlled_uses (old_root_info
, i
);
3160 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
3161 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
3162 c
= combine_controlled_uses_counters (c
, d
);
3163 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
3164 if (c
== 0 && new_root_info
->ipcp_orig_node
)
3166 struct cgraph_node
*n
;
3167 struct ipa_ref
*ref
;
3168 tree t
= new_root_info
->known_csts
[src_idx
];
3170 if (t
&& TREE_CODE (t
) == ADDR_EXPR
3171 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
3172 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
3173 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
3176 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
3177 "reference from %s/%i to %s/%i.\n",
3178 xstrdup (new_root
->name ()),
3180 xstrdup (n
->name ()), n
->order
);
3181 ref
->remove_reference ();
3185 else if (jf
->type
== IPA_JF_CONST
3186 && (rdesc
= jfunc_rdesc_usable (jf
)))
3188 int d
= ipa_get_controlled_uses (old_root_info
, i
);
3189 int c
= rdesc
->refcount
;
3190 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
3191 if (rdesc
->refcount
== 0)
3193 tree cst
= ipa_get_jf_constant (jf
);
3194 struct cgraph_node
*n
;
3195 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
3196 && TREE_CODE (TREE_OPERAND (cst
, 0))
3198 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
3201 struct cgraph_node
*clone
;
3203 ok
= remove_described_reference (n
, rdesc
);
3204 gcc_checking_assert (ok
);
3207 while (clone
->global
.inlined_to
3208 && clone
!= rdesc
->cs
->caller
3209 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
3211 struct ipa_ref
*ref
;
3212 ref
= clone
->find_reference (n
, NULL
, 0);
3216 fprintf (dump_file
, "ipa-prop: Removing "
3217 "cloning-created reference "
3218 "from %s/%i to %s/%i.\n",
3219 xstrdup (clone
->name ()),
3221 xstrdup (n
->name ()),
3223 ref
->remove_reference ();
3225 clone
= clone
->callers
->caller
;
3232 for (i
= ipa_get_param_count (old_root_info
);
3233 i
< ipa_get_cs_argument_count (args
);
3236 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3238 if (jf
->type
== IPA_JF_CONST
)
3240 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3242 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3244 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3245 ipa_set_controlled_uses (new_root_info
,
3246 jf
->value
.pass_through
.formal_id
,
3247 IPA_UNDESCRIBED_USE
);
3251 /* Update jump functions and call note functions on inlining the call site CS.
3252 CS is expected to lead to a node already cloned by
3253 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3254 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3255 created. */
3258 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
3259 vec
<cgraph_edge
*> *new_edges
)
3262 /* Do nothing if the preparation phase has not been carried out yet
3263 (i.e. during early inlining). */
3264 if (!ipa_node_params_vector
.exists ())
3266 gcc_assert (ipa_edge_args_vector
);
3268 propagate_controlled_uses (cs
);
3269 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
3274 /* Frees all dynamically allocated structures that the argument info points
3278 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
3280 vec_free (args
->jump_functions
);
3281 memset (args
, 0, sizeof (*args
));
3284 /* Free all ipa_edge structures. */
3287 ipa_free_all_edge_args (void)
3290 struct ipa_edge_args
*args
;
3292 if (!ipa_edge_args_vector
)
3295 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
3296 ipa_free_edge_args_substructures (args
);
3298 vec_free (ipa_edge_args_vector
);
3301 /* Frees all dynamically allocated structures that the param info points
3305 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
3307 info
->descriptors
.release ();
3308 free (info
->lattices
);
3309 /* Lattice values and their sources are deallocated with their allocation
3310 pool. */
3311 info
->known_csts
.release ();
3312 info
->known_contexts
.release ();
3313 memset (info
, 0, sizeof (*info
));
3316 /* Free all ipa_node_params structures. */
3319 ipa_free_all_node_params (void)
3322 struct ipa_node_params
*info
;
3324 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
3325 ipa_free_node_params_substructures (info
);
3327 ipa_node_params_vector
.release ();
3330 /* Set the aggregate replacements of NODE to be AGGVALS. */
3333 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3334 struct ipa_agg_replacement_value
*aggvals
)
3336 if (vec_safe_length (ipa_node_agg_replacements
)
3337 <= (unsigned) symtab
->cgraph_max_uid
)
3338 vec_safe_grow_cleared (ipa_node_agg_replacements
,
3339 symtab
->cgraph_max_uid
+ 1);
3341 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
3344 /* Hook that is called by cgraph.c when an edge is removed. */
3347 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3349 struct ipa_edge_args
*args
;
3351 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3352 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3355 args
= IPA_EDGE_REF (cs
);
3356 if (args
->jump_functions
)
3358 struct ipa_jump_func
*jf
;
3360 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3362 struct ipa_cst_ref_desc
*rdesc
;
3363 try_decrement_rdesc_refcount (jf
);
3364 if (jf
->type
== IPA_JF_CONST
3365 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3371 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3374 /* Hook that is called by cgraph.c when a node is removed. */
3377 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3379 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3380 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
3381 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
3382 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
3383 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
3386 /* Hook that is called by cgraph.c when an edge is duplicated. */
3389 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3390 __attribute__((unused
)) void *data
)
3392 struct ipa_edge_args
*old_args
, *new_args
;
3395 ipa_check_create_edge_args ();
3397 old_args
= IPA_EDGE_REF (src
);
3398 new_args
= IPA_EDGE_REF (dst
);
3400 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3401 if (old_args
->polymorphic_call_contexts
)
3402 new_args
->polymorphic_call_contexts
3403 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
3405 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3407 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3408 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3410 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3412 if (src_jf
->type
== IPA_JF_CONST
)
3414 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3417 dst_jf
->value
.constant
.rdesc
= NULL
;
3418 else if (src
->caller
== dst
->caller
)
3420 struct ipa_ref
*ref
;
3421 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3422 gcc_checking_assert (n
);
3423 ref
= src
->caller
->find_reference (n
, src
->call_stmt
,
3425 gcc_checking_assert (ref
);
3426 dst
->caller
->clone_reference (ref
, ref
->stmt
);
3428 gcc_checking_assert (ipa_refdesc_pool
);
3429 struct ipa_cst_ref_desc
*dst_rdesc
3430 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3431 dst_rdesc
->cs
= dst
;
3432 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3433 dst_rdesc
->next_duplicate
= NULL
;
3434 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3436 else if (src_rdesc
->cs
== src
)
3438 struct ipa_cst_ref_desc
*dst_rdesc
;
3439 gcc_checking_assert (ipa_refdesc_pool
);
3441 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3442 dst_rdesc
->cs
= dst
;
3443 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3444 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3445 src_rdesc
->next_duplicate
= dst_rdesc
;
3446 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3450 struct ipa_cst_ref_desc
*dst_rdesc
;
3451 /* This can happen during inlining, when a JFUNC can refer to a
3452 reference taken in a function up in the tree of inline clones.
3453 We need to find the duplicate that refers to our tree of
3456 gcc_assert (dst
->caller
->global
.inlined_to
);
3457 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3459 dst_rdesc
= dst_rdesc
->next_duplicate
)
3461 struct cgraph_node
*top
;
3462 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3463 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3464 : dst_rdesc
->cs
->caller
;
3465 if (dst
->caller
->global
.inlined_to
== top
)
3468 gcc_assert (dst_rdesc
);
3469 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3472 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
3473 && src
->caller
== dst
->caller
)
3475 struct cgraph_node
*inline_root
= dst
->caller
->global
.inlined_to
3476 ? dst
->caller
->global
.inlined_to
: dst
->caller
;
3477 struct ipa_node_params
*root_info
= IPA_NODE_REF (inline_root
);
3478 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
3480 int c
= ipa_get_controlled_uses (root_info
, idx
);
3481 if (c
!= IPA_UNDESCRIBED_USE
)
3484 ipa_set_controlled_uses (root_info
, idx
, c
);
3490 /* Hook that is called by cgraph.c when a node is duplicated. */
3493 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
3494 ATTRIBUTE_UNUSED
void *data
)
3496 struct ipa_node_params
*old_info
, *new_info
;
3497 struct ipa_agg_replacement_value
*old_av
, *new_av
;
3499 ipa_check_create_node_params ();
3500 old_info
= IPA_NODE_REF (src
);
3501 new_info
= IPA_NODE_REF (dst
);
3503 new_info
->descriptors
= old_info
->descriptors
.copy ();
3504 new_info
->lattices
= NULL
;
3505 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3507 new_info
->analysis_done
= old_info
->analysis_done
;
3508 new_info
->node_enqueued
= old_info
->node_enqueued
;
3510 old_av
= ipa_get_agg_replacements_for_node (src
);
3517 struct ipa_agg_replacement_value
*v
;
3519 v
= ggc_alloc
<ipa_agg_replacement_value
> ();
3520 memcpy (v
, old_av
, sizeof (*v
));
3523 old_av
= old_av
->next
;
3525 ipa_set_node_agg_value_chain (dst
, new_av
);
3529 /* Analyze newly added function into callgraph. */
3532 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3534 if (node
->has_gimple_body_p ())
3535 ipa_analyze_node (node
);
3538 /* Register our cgraph hooks if they are not already there. */
3541 ipa_register_cgraph_hooks (void)
3543 if (!edge_removal_hook_holder
)
3544 edge_removal_hook_holder
=
3545 symtab
->add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3546 if (!node_removal_hook_holder
)
3547 node_removal_hook_holder
=
3548 symtab
->add_cgraph_removal_hook (&ipa_node_removal_hook
, NULL
);
3549 if (!edge_duplication_hook_holder
)
3550 edge_duplication_hook_holder
=
3551 symtab
->add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3552 if (!node_duplication_hook_holder
)
3553 node_duplication_hook_holder
=
3554 symtab
->add_cgraph_duplication_hook (&ipa_node_duplication_hook
, NULL
);
3555 function_insertion_hook_holder
=
3556 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
3559 /* Unregister our cgraph hooks if they are not already there. */
3562 ipa_unregister_cgraph_hooks (void)
3564 symtab
->remove_edge_removal_hook (edge_removal_hook_holder
);
3565 edge_removal_hook_holder
= NULL
;
3566 symtab
->remove_cgraph_removal_hook (node_removal_hook_holder
);
3567 node_removal_hook_holder
= NULL
;
3568 symtab
->remove_edge_duplication_hook (edge_duplication_hook_holder
);
3569 edge_duplication_hook_holder
= NULL
;
3570 symtab
->remove_cgraph_duplication_hook (node_duplication_hook_holder
);
3571 node_duplication_hook_holder
= NULL
;
3572 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
3573 function_insertion_hook_holder
= NULL
;
3576 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3577 longer needed after ipa-cp. */
3580 ipa_free_all_structures_after_ipa_cp (void)
3584 ipa_free_all_edge_args ();
3585 ipa_free_all_node_params ();
3586 free_alloc_pool (ipcp_sources_pool
);
3587 free_alloc_pool (ipcp_cst_values_pool
);
3588 free_alloc_pool (ipcp_poly_ctx_values_pool
);
3589 free_alloc_pool (ipcp_agg_lattice_pool
);
3590 ipa_unregister_cgraph_hooks ();
3591 if (ipa_refdesc_pool
)
3592 free_alloc_pool (ipa_refdesc_pool
);
3596 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3597 longer needed after indirect inlining. */
3600 ipa_free_all_structures_after_iinln (void)
3602 ipa_free_all_edge_args ();
3603 ipa_free_all_node_params ();
3604 ipa_unregister_cgraph_hooks ();
3605 if (ipcp_sources_pool
)
3606 free_alloc_pool (ipcp_sources_pool
);
3607 if (ipcp_cst_values_pool
)
3608 free_alloc_pool (ipcp_cst_values_pool
);
3609 if (ipcp_poly_ctx_values_pool
)
3610 free_alloc_pool (ipcp_poly_ctx_values_pool
);
3611 if (ipcp_agg_lattice_pool
)
3612 free_alloc_pool (ipcp_agg_lattice_pool
);
3613 if (ipa_refdesc_pool
)
3614 free_alloc_pool (ipa_refdesc_pool
);
3617 /* Print ipa_tree_map data structures of all functions in the
3621 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3624 struct ipa_node_params
*info
;
3626 if (!node
->definition
)
3628 info
= IPA_NODE_REF (node
);
3629 fprintf (f
, " function %s/%i parameter descriptors:\n",
3630 node
->name (), node
->order
);
3631 count
= ipa_get_param_count (info
);
3632 for (i
= 0; i
< count
; i
++)
3637 ipa_dump_param (f
, info
, i
);
3638 if (ipa_is_param_used (info
, i
))
3639 fprintf (f
, " used");
3640 c
= ipa_get_controlled_uses (info
, i
);
3641 if (c
== IPA_UNDESCRIBED_USE
)
3642 fprintf (f
, " undescribed_use");
3644 fprintf (f
, " controlled_uses=%i", c
);
3649 /* Print ipa_tree_map data structures of all functions in the
3653 ipa_print_all_params (FILE * f
)
3655 struct cgraph_node
*node
;
3657 fprintf (f
, "\nFunction parameters:\n");
3658 FOR_EACH_FUNCTION (node
)
3659 ipa_print_node_params (f
, node
);
3662 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3665 ipa_get_vector_of_formal_parms (tree fndecl
)
3671 gcc_assert (!flag_wpa
);
3672 count
= count_formal_params (fndecl
);
3673 args
.create (count
);
3674 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3675 args
.quick_push (parm
);
3680 /* Return a heap allocated vector containing types of formal parameters of
3681 function type FNTYPE. */
3684 ipa_get_vector_of_formal_parm_types (tree fntype
)
3690 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3693 types
.create (count
);
3694 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3695 types
.quick_push (TREE_VALUE (t
));
3700 /* Modify the function declaration FNDECL and its type according to the plan in
3701 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3702 to reflect the actual parameters being modified which are determined by the
3703 base_index field. */
3706 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3708 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3709 tree orig_type
= TREE_TYPE (fndecl
);
3710 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3712 /* The following test is an ugly hack, some functions simply don't have any
3713 arguments in their type. This is probably a bug but well... */
3714 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3715 bool last_parm_void
;
3719 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3721 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3723 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3725 gcc_assert (oparms
.length () == otypes
.length ());
3729 last_parm_void
= false;
3733 int len
= adjustments
.length ();
3734 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3735 tree new_arg_types
= NULL
;
3736 for (int i
= 0; i
< len
; i
++)
3738 struct ipa_parm_adjustment
*adj
;
3741 adj
= &adjustments
[i
];
3743 if (adj
->op
== IPA_PARM_OP_NEW
)
3746 parm
= oparms
[adj
->base_index
];
3749 if (adj
->op
== IPA_PARM_OP_COPY
)
3752 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3755 link
= &DECL_CHAIN (parm
);
3757 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3763 ptype
= build_pointer_type (adj
->type
);
3767 if (is_gimple_reg_type (ptype
))
3769 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3770 if (TYPE_ALIGN (ptype
) < malign
)
3771 ptype
= build_aligned_type (ptype
, malign
);
3776 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3778 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3780 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3781 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3782 DECL_ARTIFICIAL (new_parm
) = 1;
3783 DECL_ARG_TYPE (new_parm
) = ptype
;
3784 DECL_CONTEXT (new_parm
) = fndecl
;
3785 TREE_USED (new_parm
) = 1;
3786 DECL_IGNORED_P (new_parm
) = 1;
3787 layout_decl (new_parm
, 0);
3789 if (adj
->op
== IPA_PARM_OP_NEW
)
3793 adj
->new_decl
= new_parm
;
3796 link
= &DECL_CHAIN (new_parm
);
3802 tree new_reversed
= NULL
;
3805 new_reversed
= nreverse (new_arg_types
);
3809 TREE_CHAIN (new_arg_types
) = void_list_node
;
3811 new_reversed
= void_list_node
;
3815 /* Use copy_node to preserve as much as possible from original type
3816 (debug info, attribute lists etc.)
3817 Exception is METHOD_TYPEs must have THIS argument.
3818 When we are asked to remove it, we need to build new FUNCTION_TYPE
3820 tree new_type
= NULL
;
3821 if (TREE_CODE (orig_type
) != METHOD_TYPE
3822 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3823 && adjustments
[0].base_index
== 0))
3825 new_type
= build_distinct_type_copy (orig_type
);
3826 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3831 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3833 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3834 DECL_VINDEX (fndecl
) = NULL_TREE
;
3837 /* When signature changes, we need to clear builtin info. */
3838 if (DECL_BUILT_IN (fndecl
))
3840 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3841 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3844 TREE_TYPE (fndecl
) = new_type
;
3845 DECL_VIRTUAL_P (fndecl
) = 0;
3846 DECL_LANG_SPECIFIC (fndecl
) = NULL
;
3851 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3852 If this is a directly recursive call, CS must be NULL. Otherwise it must
3853 contain the corresponding call graph edge. */
3856 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3857 ipa_parm_adjustment_vec adjustments
)
3859 struct cgraph_node
*current_node
= cgraph_node::get (current_function_decl
);
3861 vec
<tree
, va_gc
> **debug_args
= NULL
;
3863 gimple_stmt_iterator gsi
, prev_gsi
;
3867 len
= adjustments
.length ();
3869 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3870 current_node
->remove_stmt_references (stmt
);
3872 gsi
= gsi_for_stmt (stmt
);
3874 gsi_prev (&prev_gsi
);
3875 for (i
= 0; i
< len
; i
++)
3877 struct ipa_parm_adjustment
*adj
;
3879 adj
= &adjustments
[i
];
3881 if (adj
->op
== IPA_PARM_OP_COPY
)
3883 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3885 vargs
.quick_push (arg
);
3887 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3889 tree expr
, base
, off
;
3891 unsigned int deref_align
= 0;
3892 bool deref_base
= false;
3894 /* We create a new parameter out of the value of the old one, we can
3895 do the following kind of transformations:
3897 - A scalar passed by reference is converted to a scalar passed by
3898 value. (adj->by_ref is false and the type of the original
3899 actual argument is a pointer to a scalar).
3901 - A part of an aggregate is passed instead of the whole aggregate.
3902 The part can be passed either by value or by reference, this is
3903 determined by value of adj->by_ref. Moreover, the code below
3904 handles both situations when the original aggregate is passed by
3905 value (its type is not a pointer) and when it is passed by
3906 reference (it is a pointer to an aggregate).
3908 When the new argument is passed by reference (adj->by_ref is true)
3909 it must be a part of an aggregate and therefore we form it by
3910 simply taking the address of a reference inside the original
3913 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3914 base
= gimple_call_arg (stmt
, adj
->base_index
);
3915 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3916 : EXPR_LOCATION (base
);
3918 if (TREE_CODE (base
) != ADDR_EXPR
3919 && POINTER_TYPE_P (TREE_TYPE (base
)))
3920 off
= build_int_cst (adj
->alias_ptr_type
,
3921 adj
->offset
/ BITS_PER_UNIT
);
3924 HOST_WIDE_INT base_offset
;
3928 if (TREE_CODE (base
) == ADDR_EXPR
)
3930 base
= TREE_OPERAND (base
, 0);
3936 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3937 /* Aggregate arguments can have non-invariant addresses. */
3940 base
= build_fold_addr_expr (prev_base
);
3941 off
= build_int_cst (adj
->alias_ptr_type
,
3942 adj
->offset
/ BITS_PER_UNIT
);
3944 else if (TREE_CODE (base
) == MEM_REF
)
3949 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3951 off
= build_int_cst (adj
->alias_ptr_type
,
3953 + adj
->offset
/ BITS_PER_UNIT
);
3954 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3956 base
= TREE_OPERAND (base
, 0);
3960 off
= build_int_cst (adj
->alias_ptr_type
,
3962 + adj
->offset
/ BITS_PER_UNIT
);
3963 base
= build_fold_addr_expr (base
);
3969 tree type
= adj
->type
;
3971 unsigned HOST_WIDE_INT misalign
;
3975 align
= deref_align
;
3980 get_pointer_alignment_1 (base
, &align
, &misalign
);
3981 if (TYPE_ALIGN (type
) > align
)
3982 align
= TYPE_ALIGN (type
);
3984 misalign
+= (offset_int::from (off
, SIGNED
).to_short_addr ()
3986 misalign
= misalign
& (align
- 1);
3988 align
= (misalign
& -misalign
);
3989 if (align
< TYPE_ALIGN (type
))
3990 type
= build_aligned_type (type
, align
);
3991 base
= force_gimple_operand_gsi (&gsi
, base
,
3992 true, NULL
, true, GSI_SAME_STMT
);
3993 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3994 /* If expr is not a valid gimple call argument emit
3995 a load into a temporary. */
3996 if (is_gimple_reg_type (TREE_TYPE (expr
)))
3998 gimple tem
= gimple_build_assign (NULL_TREE
, expr
);
3999 if (gimple_in_ssa_p (cfun
))
4001 gimple_set_vuse (tem
, gimple_vuse (stmt
));
4002 expr
= make_ssa_name (TREE_TYPE (expr
), tem
);
4005 expr
= create_tmp_reg (TREE_TYPE (expr
), NULL
);
4006 gimple_assign_set_lhs (tem
, expr
);
4007 gsi_insert_before (&gsi
, tem
, GSI_SAME_STMT
);
4012 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
4013 expr
= build_fold_addr_expr (expr
);
4014 expr
= force_gimple_operand_gsi (&gsi
, expr
,
4015 true, NULL
, true, GSI_SAME_STMT
);
4017 vargs
.quick_push (expr
);
4019 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
4022 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
4025 arg
= gimple_call_arg (stmt
, adj
->base_index
);
4026 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
4028 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
4030 arg
= fold_convert_loc (gimple_location (stmt
),
4031 TREE_TYPE (origin
), arg
);
4033 if (debug_args
== NULL
)
4034 debug_args
= decl_debug_args_insert (callee_decl
);
4035 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
4036 if (ddecl
== origin
)
4038 ddecl
= (**debug_args
)[ix
+ 1];
4043 ddecl
= make_node (DEBUG_EXPR_DECL
);
4044 DECL_ARTIFICIAL (ddecl
) = 1;
4045 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
4046 DECL_MODE (ddecl
) = DECL_MODE (origin
);
4048 vec_safe_push (*debug_args
, origin
);
4049 vec_safe_push (*debug_args
, ddecl
);
4051 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
4052 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
4056 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4058 fprintf (dump_file
, "replacing stmt:");
4059 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
4062 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
4064 if (gimple_call_lhs (stmt
))
4065 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
4067 gimple_set_block (new_stmt
, gimple_block (stmt
));
4068 if (gimple_has_location (stmt
))
4069 gimple_set_location (new_stmt
, gimple_location (stmt
));
4070 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
4071 gimple_call_copy_flags (new_stmt
, stmt
);
4072 if (gimple_in_ssa_p (cfun
))
4074 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
4075 if (gimple_vdef (stmt
))
4077 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
4078 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
4082 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4084 fprintf (dump_file
, "with stmt:");
4085 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
4086 fprintf (dump_file
, "\n");
4088 gsi_replace (&gsi
, new_stmt
, true);
4090 cs
->set_call_stmt (new_stmt
);
4093 current_node
->record_stmt_references (gsi_stmt (gsi
));
4096 while (gsi_stmt (gsi
) != gsi_stmt (prev_gsi
));
4099 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4100 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4101 specifies whether the function should care about type incompatibility the
4102 current and new expressions. If it is false, the function will leave
4103 incompatibility issues to the caller. Return true iff the expression
4107 ipa_modify_expr (tree
*expr
, bool convert
,
4108 ipa_parm_adjustment_vec adjustments
)
4110 struct ipa_parm_adjustment
*cand
4111 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
4117 src
= build_simple_mem_ref (cand
->new_decl
);
4119 src
= cand
->new_decl
;
4121 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4123 fprintf (dump_file
, "About to replace expr ");
4124 print_generic_expr (dump_file
, *expr
, 0);
4125 fprintf (dump_file
, " with ");
4126 print_generic_expr (dump_file
, src
, 0);
4127 fprintf (dump_file
, "\n");
4130 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
4132 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
4140 /* If T is an SSA_NAME, return NULL if it is not a default def or
4141 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4142 the base variable is always returned, regardless if it is a default
4143 def. Return T if it is not an SSA_NAME. */
4146 get_ssa_base_param (tree t
, bool ignore_default_def
)
4148 if (TREE_CODE (t
) == SSA_NAME
)
4150 if (ignore_default_def
|| SSA_NAME_IS_DEFAULT_DEF (t
))
4151 return SSA_NAME_VAR (t
);
4158 /* Given an expression, return an adjustment entry specifying the
4159 transformation to be done on EXPR. If no suitable adjustment entry
4160 was found, returns NULL.
4162 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4163 default def, otherwise bail on them.
4165 If CONVERT is non-NULL, this function will set *CONVERT if the
4166 expression provided is a component reference. ADJUSTMENTS is the
4167 adjustments vector. */
4169 ipa_parm_adjustment
*
4170 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
4171 ipa_parm_adjustment_vec adjustments
,
4172 bool ignore_default_def
)
4174 if (TREE_CODE (**expr
) == BIT_FIELD_REF
4175 || TREE_CODE (**expr
) == IMAGPART_EXPR
4176 || TREE_CODE (**expr
) == REALPART_EXPR
)
4178 *expr
= &TREE_OPERAND (**expr
, 0);
4183 HOST_WIDE_INT offset
, size
, max_size
;
4184 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
4185 if (!base
|| size
== -1 || max_size
== -1)
4188 if (TREE_CODE (base
) == MEM_REF
)
4190 offset
+= mem_ref_offset (base
).to_short_addr () * BITS_PER_UNIT
;
4191 base
= TREE_OPERAND (base
, 0);
4194 base
= get_ssa_base_param (base
, ignore_default_def
);
4195 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
4198 struct ipa_parm_adjustment
*cand
= NULL
;
4199 unsigned int len
= adjustments
.length ();
4200 for (unsigned i
= 0; i
< len
; i
++)
4202 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
4204 if (adj
->base
== base
4205 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
4212 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
4217 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4220 index_in_adjustments_multiple_times_p (int base_index
,
4221 ipa_parm_adjustment_vec adjustments
)
4223 int i
, len
= adjustments
.length ();
4226 for (i
= 0; i
< len
; i
++)
4228 struct ipa_parm_adjustment
*adj
;
4229 adj
= &adjustments
[i
];
4231 if (adj
->base_index
== base_index
)
4243 /* Return adjustments that should have the same effect on function parameters
4244 and call arguments as if they were first changed according to adjustments in
4245 INNER and then by adjustments in OUTER. */
4247 ipa_parm_adjustment_vec
4248 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
4249 ipa_parm_adjustment_vec outer
)
4251 int i
, outlen
= outer
.length ();
4252 int inlen
= inner
.length ();
4254 ipa_parm_adjustment_vec adjustments
, tmp
;
4257 for (i
= 0; i
< inlen
; i
++)
4259 struct ipa_parm_adjustment
*n
;
4262 if (n
->op
== IPA_PARM_OP_REMOVE
)
4266 /* FIXME: Handling of new arguments are not implemented yet. */
4267 gcc_assert (n
->op
!= IPA_PARM_OP_NEW
);
4268 tmp
.quick_push (*n
);
4272 adjustments
.create (outlen
+ removals
);
4273 for (i
= 0; i
< outlen
; i
++)
4275 struct ipa_parm_adjustment r
;
4276 struct ipa_parm_adjustment
*out
= &outer
[i
];
4277 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
4279 memset (&r
, 0, sizeof (r
));
4280 gcc_assert (in
->op
!= IPA_PARM_OP_REMOVE
);
4281 if (out
->op
== IPA_PARM_OP_REMOVE
)
4283 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
4285 r
.op
= IPA_PARM_OP_REMOVE
;
4286 adjustments
.quick_push (r
);
4292 /* FIXME: Handling of new arguments are not implemented yet. */
4293 gcc_assert (out
->op
!= IPA_PARM_OP_NEW
);
4296 r
.base_index
= in
->base_index
;
4299 /* FIXME: Create nonlocal value too. */
4301 if (in
->op
== IPA_PARM_OP_COPY
&& out
->op
== IPA_PARM_OP_COPY
)
4302 r
.op
= IPA_PARM_OP_COPY
;
4303 else if (in
->op
== IPA_PARM_OP_COPY
)
4304 r
.offset
= out
->offset
;
4305 else if (out
->op
== IPA_PARM_OP_COPY
)
4306 r
.offset
= in
->offset
;
4308 r
.offset
= in
->offset
+ out
->offset
;
4309 adjustments
.quick_push (r
);
4312 for (i
= 0; i
< inlen
; i
++)
4314 struct ipa_parm_adjustment
*n
= &inner
[i
];
4316 if (n
->op
== IPA_PARM_OP_REMOVE
)
4317 adjustments
.quick_push (*n
);
4324 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4325 friendly way, assuming they are meant to be applied to FNDECL. */
4328 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
4331 int i
, len
= adjustments
.length ();
4333 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
4335 fprintf (file
, "IPA param adjustments: ");
4336 for (i
= 0; i
< len
; i
++)
4338 struct ipa_parm_adjustment
*adj
;
4339 adj
= &adjustments
[i
];
4342 fprintf (file
, " ");
4346 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
4347 print_generic_expr (file
, parms
[adj
->base_index
], 0);
4350 fprintf (file
, ", base: ");
4351 print_generic_expr (file
, adj
->base
, 0);
4355 fprintf (file
, ", new_decl: ");
4356 print_generic_expr (file
, adj
->new_decl
, 0);
4358 if (adj
->new_ssa_base
)
4360 fprintf (file
, ", new_ssa_base: ");
4361 print_generic_expr (file
, adj
->new_ssa_base
, 0);
4364 if (adj
->op
== IPA_PARM_OP_COPY
)
4365 fprintf (file
, ", copy_param");
4366 else if (adj
->op
== IPA_PARM_OP_REMOVE
)
4367 fprintf (file
, ", remove_param");
4369 fprintf (file
, ", offset %li", (long) adj
->offset
);
4371 fprintf (file
, ", by_ref");
4372 print_node_brief (file
, ", type: ", adj
->type
, 0);
4373 fprintf (file
, "\n");
4378 /* Dump the AV linked list. */
4381 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4384 fprintf (f
, " Aggregate replacements:");
4385 for (; av
; av
= av
->next
)
4387 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4388 av
->index
, av
->offset
);
4389 print_generic_expr (f
, av
->value
, 0);
4395 /* Stream out jump function JUMP_FUNC to OB. */
4398 ipa_write_jump_function (struct output_block
*ob
,
4399 struct ipa_jump_func
*jump_func
)
4401 struct ipa_agg_jf_item
*item
;
4402 struct bitpack_d bp
;
4405 streamer_write_uhwi (ob
, jump_func
->type
);
4406 switch (jump_func
->type
)
4408 case IPA_JF_UNKNOWN
:
4412 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4413 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
4415 case IPA_JF_PASS_THROUGH
:
4416 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4417 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4419 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4420 bp
= bitpack_create (ob
->main_stream
);
4421 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4422 streamer_write_bitpack (&bp
);
4426 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4427 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4430 case IPA_JF_ANCESTOR
:
4431 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4432 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4433 bp
= bitpack_create (ob
->main_stream
);
4434 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4435 streamer_write_bitpack (&bp
);
4439 count
= vec_safe_length (jump_func
->agg
.items
);
4440 streamer_write_uhwi (ob
, count
);
4443 bp
= bitpack_create (ob
->main_stream
);
4444 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4445 streamer_write_bitpack (&bp
);
4448 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4450 streamer_write_uhwi (ob
, item
->offset
);
4451 stream_write_tree (ob
, item
->value
, true);
4455 /* Read in jump function JUMP_FUNC from IB. */
4458 ipa_read_jump_function (struct lto_input_block
*ib
,
4459 struct ipa_jump_func
*jump_func
,
4460 struct cgraph_edge
*cs
,
4461 struct data_in
*data_in
)
4463 enum jump_func_type jftype
;
4464 enum tree_code operation
;
4467 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4470 case IPA_JF_UNKNOWN
:
4471 jump_func
->type
= IPA_JF_UNKNOWN
;
4474 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
4476 case IPA_JF_PASS_THROUGH
:
4477 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4478 if (operation
== NOP_EXPR
)
4480 int formal_id
= streamer_read_uhwi (ib
);
4481 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4482 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4483 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4487 tree operand
= stream_read_tree (ib
, data_in
);
4488 int formal_id
= streamer_read_uhwi (ib
);
4489 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4493 case IPA_JF_ANCESTOR
:
4495 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4496 int formal_id
= streamer_read_uhwi (ib
);
4497 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4498 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4499 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
);
4504 count
= streamer_read_uhwi (ib
);
4505 vec_alloc (jump_func
->agg
.items
, count
);
4508 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4509 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4511 for (i
= 0; i
< count
; i
++)
4513 struct ipa_agg_jf_item item
;
4514 item
.offset
= streamer_read_uhwi (ib
);
4515 item
.value
= stream_read_tree (ib
, data_in
);
4516 jump_func
->agg
.items
->quick_push (item
);
4520 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4521 relevant to indirect inlining to OB. */
4524 ipa_write_indirect_edge_info (struct output_block
*ob
,
4525 struct cgraph_edge
*cs
)
4527 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4528 struct bitpack_d bp
;
4530 streamer_write_hwi (ob
, ii
->param_index
);
4531 bp
= bitpack_create (ob
->main_stream
);
4532 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4533 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4534 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4535 bp_pack_value (&bp
, ii
->by_ref
, 1);
4536 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
4537 streamer_write_bitpack (&bp
);
4538 if (ii
->agg_contents
|| ii
->polymorphic
)
4539 streamer_write_hwi (ob
, ii
->offset
);
4541 gcc_assert (ii
->offset
== 0);
4543 if (ii
->polymorphic
)
4545 streamer_write_hwi (ob
, ii
->otr_token
);
4546 stream_write_tree (ob
, ii
->otr_type
, true);
4547 ii
->context
.stream_out (ob
);
4551 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4552 relevant to indirect inlining from IB. */
4555 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
4556 struct data_in
*data_in
,
4557 struct cgraph_edge
*cs
)
4559 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4560 struct bitpack_d bp
;
4562 ii
->param_index
= (int) streamer_read_hwi (ib
);
4563 bp
= streamer_read_bitpack (ib
);
4564 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4565 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4566 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4567 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4568 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
4569 if (ii
->agg_contents
|| ii
->polymorphic
)
4570 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4573 if (ii
->polymorphic
)
4575 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4576 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4577 ii
->context
.stream_in (ib
, data_in
);
4581 /* Stream out NODE info to OB. */
4584 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4587 lto_symtab_encoder_t encoder
;
4588 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4590 struct cgraph_edge
*e
;
4591 struct bitpack_d bp
;
4593 encoder
= ob
->decl_state
->symtab_node_encoder
;
4594 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4595 streamer_write_uhwi (ob
, node_ref
);
4597 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4598 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4599 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4600 bp
= bitpack_create (ob
->main_stream
);
4601 gcc_assert (info
->analysis_done
4602 || ipa_get_param_count (info
) == 0);
4603 gcc_assert (!info
->node_enqueued
);
4604 gcc_assert (!info
->ipcp_orig_node
);
4605 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4606 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4607 streamer_write_bitpack (&bp
);
4608 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4609 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4610 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4612 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4614 streamer_write_uhwi (ob
,
4615 ipa_get_cs_argument_count (args
) * 2
4616 + (args
->polymorphic_call_contexts
!= NULL
));
4617 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4619 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4620 if (args
->polymorphic_call_contexts
!= NULL
)
4621 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4624 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4626 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4628 streamer_write_uhwi (ob
,
4629 ipa_get_cs_argument_count (args
) * 2
4630 + (args
->polymorphic_call_contexts
!= NULL
));
4631 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4633 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4634 if (args
->polymorphic_call_contexts
!= NULL
)
4635 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4637 ipa_write_indirect_edge_info (ob
, e
);
4641 /* Stream in NODE info from IB. */
/* Counterpart of ipa_write_node_info: reads the parameter count,
   per-parameter move costs and flags, then the jump functions (and,
   when present, polymorphic call contexts) of every direct and
   indirect outgoing edge of NODE.  DATA_IN supplies string/tree
   tables for tree streaming.  */
4644 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
4645 struct data_in
*data_in
)
4647 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4649 struct cgraph_edge
*e
;
4650 struct bitpack_d bp
;
/* The first streamed item is the number of formal parameters.  */
4652 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
4654 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4655 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
/* The per-parameter "used" flags were written as a single bitpack.  */
4657 bp
= streamer_read_bitpack (ib
);
4658 if (ipa_get_param_count (info
) != 0)
4659 info
->analysis_done
= true;
4660 info
->node_enqueued
= false;
4661 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4662 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
4663 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4664 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
/* Direct call edges.  The low bit of the streamed count encodes
   whether polymorphic call contexts accompany the jump functions,
   mirroring the encoding on the writer side.  */
4665 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4667 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4668 int count
= streamer_read_uhwi (ib
);
4669 bool contexts_computed
= count
& 1;
4674 vec_safe_grow_cleared (args
->jump_functions
, count
);
4675 if (contexts_computed
)
4676 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
);
4678 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4680 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4682 if (contexts_computed
)
4683 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in (ib
, data_in
);
/* Indirect call edges, streamed with the same encoding as above.  */
4686 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4688 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4689 int count
= streamer_read_uhwi (ib
);
4690 bool contexts_computed
= count
& 1;
4695 vec_safe_grow_cleared (args
->jump_functions
, count
);
4696 if (contexts_computed
)
4697 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
);
4698 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4700 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4702 if (contexts_computed
)
4703 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in (ib
, data_in
);
/* Lastly, the description of the indirect edge itself.  */
4706 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4710 /* Write jump functions for nodes in SET. */
/* LTO write hook: emits the LTO_section_jump_functions section for the
   current partition.  Format: a count of analyzed function nodes,
   followed by one ipa_write_node_info record per such node.  Does
   nothing if no IPA parameter information exists.  */
4713 ipa_prop_write_jump_functions (void)
4715 struct cgraph_node
*node
;
4716 struct output_block
*ob
;
4717 unsigned int count
= 0;
4718 lto_symtab_encoder_iterator lsei
;
4719 lto_symtab_encoder_t encoder
;
/* Nothing analyzed, nothing to stream.  */
4722 if (!ipa_node_params_vector
.exists ())
4725 ob
= create_output_block (LTO_section_jump_functions
);
4726 encoder
= ob
->decl_state
->symtab_node_encoder
;
/* First pass: count nodes that have a gimple body and IPA info, so the
   reader knows how many records follow.  */
4728 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4729 lsei_next_function_in_partition (&lsei
))
4731 node
= lsei_cgraph_node (lsei
);
4732 if (node
->has_gimple_body_p ()
4733 && IPA_NODE_REF (node
) != NULL
)
4737 streamer_write_uhwi (ob
, count
);
4739 /* Process all of the functions. */
4740 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4741 lsei_next_function_in_partition (&lsei
))
4743 node
= lsei_cgraph_node (lsei
);
4744 if (node
->has_gimple_body_p ()
4745 && IPA_NODE_REF (node
) != NULL
)
4746 ipa_write_node_info (ob
, node
);
/* Terminate the stream and emit the section.  */
4748 streamer_write_char_stream (ob
->main_stream
, 0);
4749 produce_asm (ob
, NULL
);
4750 destroy_output_block (ob
);
4753 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* Parses one LTO_section_jump_functions section: decodes the
   lto_function_header offsets, reads the node count, and calls
   ipa_read_node_info for each encoded node.  Frees the section data
   when done.  */
4756 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
/* The section begins with a standard function header carrying the
   sizes of the cfg, main and string streams.  */
4759 const struct lto_function_header
*header
=
4760 (const struct lto_function_header
*) data
;
4761 const int cfg_offset
= sizeof (struct lto_function_header
);
4762 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4763 const int string_offset
= main_offset
+ header
->main_size
;
4764 struct data_in
*data_in
;
4768 lto_input_block
ib_main ((const char *) data
+ main_offset
,
4772 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4773 header
->string_size
, vNULL
);
4774 count
= streamer_read_uhwi (&ib_main
);
/* One record per previously written analyzed node; the node is found
   through the symtab encoder by its streamed index.  */
4776 for (i
= 0; i
< count
; i
++)
4779 struct cgraph_node
*node
;
4780 lto_symtab_encoder_t encoder
;
4782 index
= streamer_read_uhwi (&ib_main
);
4783 encoder
= file_data
->symtab_node_encoder
;
4784 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
4786 gcc_assert (node
->definition
);
4787 ipa_read_node_info (&ib_main
, node
, data_in
);
4789 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4791 lto_data_in_delete (data_in
);
4794 /* Read ipcp jump functions. */
/* LTO read hook: walks all file decl data, fetches each file's
   LTO_section_jump_functions section (if present) and hands it to
   ipa_prop_read_section.  Ensures the IPA node/edge summaries and the
   cgraph hooks exist first.  */
4797 ipa_prop_read_jump_functions (void)
4799 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4800 struct lto_file_decl_data
*file_data
;
4803 ipa_check_create_node_params ();
4804 ipa_check_create_edge_args ();
4805 ipa_register_cgraph_hooks ();
/* file_data_vec is NULL-terminated.  */
4807 while ((file_data
= file_data_vec
[j
++]))
4810 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4813 ipa_prop_read_section (file_data
, data
, len
);
4817 /* After merging units, we can get mismatch in argument counts.
4818 Also decl merging might've rendered parameter lists obsolete.
4819 Also compute called_with_variable_arg info. */
/* Re-creates the IPA node and edge summaries so they are consistent
   after LTO symbol merging.  */
4822 ipa_update_after_lto_read (void)
4824 ipa_check_create_node_params ();
4825 ipa_check_create_edge_args ();
/* Stream out the IPA-CP aggregate replacement chain of NODE to OB:
   the encoder reference of NODE, the number of replacement entries,
   then for each entry its offset, parameter index, constant value and
   a by_ref bit.  Read back by read_agg_replacement_chain.  */
4829 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4832 unsigned int count
= 0;
4833 lto_symtab_encoder_t encoder
;
4834 struct ipa_agg_replacement_value
*aggvals
, *av
;
4836 aggvals
= ipa_get_agg_replacements_for_node (node
);
4837 encoder
= ob
->decl_state
->symtab_node_encoder
;
4838 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4839 streamer_write_uhwi (ob
, node_ref
);
/* First pass over the chain just counts the entries.  */
4841 for (av
= aggvals
; av
; av
= av
->next
)
4843 streamer_write_uhwi (ob
, count
);
/* Second pass streams each replacement value.  */
4845 for (av
= aggvals
; av
; av
= av
->next
)
4847 struct bitpack_d bp
;
4849 streamer_write_uhwi (ob
, av
->offset
);
4850 streamer_write_uhwi (ob
, av
->index
);
4851 stream_write_tree (ob
, av
->value
, true);
/* The by_ref flag travels as a one-bit bitpack.  */
4853 bp
= bitpack_create (ob
->main_stream
);
4854 bp_pack_value (&bp
, av
->by_ref
, 1);
4855 streamer_write_bitpack (&bp
);
4859 /* Stream in the aggregate value replacement chain for NODE from IB. */
/* Inverse of write_agg_replacement_chain: reads a count, then for each
   entry allocates an ipa_agg_replacement_value in GC memory and fills
   its offset, index, value tree and by_ref bit; finally attaches the
   rebuilt chain to NODE.  */
4862 read_agg_replacement_chain (struct lto_input_block
*ib
,
4863 struct cgraph_node
*node
,
4864 struct data_in
*data_in
)
4866 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4867 unsigned int count
, i
;
4869 count
= streamer_read_uhwi (ib
);
4870 for (i
= 0; i
<count
; i
++)
4872 struct ipa_agg_replacement_value
*av
;
4873 struct bitpack_d bp
;
/* GC-allocated because the value tree must stay alive with it.  */
4875 av
= ggc_alloc
<ipa_agg_replacement_value
> ();
4876 av
->offset
= streamer_read_uhwi (ib
);
4877 av
->index
= streamer_read_uhwi (ib
);
4878 av
->value
= stream_read_tree (ib
, data_in
);
4879 bp
= streamer_read_bitpack (ib
);
4880 av
->by_ref
= bp_unpack_value (&bp
, 1);
4884 ipa_set_node_agg_value_chain (node
, aggvals
);
4887 /* Write all aggregate replacement for nodes in set. */
/* LTO write hook for the LTO_section_ipcp_transform section: emits a
   count of nodes with aggregate replacements, then one
   write_agg_replacement_chain record per such node.  Does nothing if
   no replacements were computed.  */
4890 ipa_prop_write_all_agg_replacement (void)
4892 struct cgraph_node
*node
;
4893 struct output_block
*ob
;
4894 unsigned int count
= 0;
4895 lto_symtab_encoder_iterator lsei
;
4896 lto_symtab_encoder_t encoder
;
4898 if (!ipa_node_agg_replacements
)
4901 ob
= create_output_block (LTO_section_ipcp_transform
);
4902 encoder
= ob
->decl_state
->symtab_node_encoder
;
/* First pass counts the nodes to be streamed.  */
4904 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4905 lsei_next_function_in_partition (&lsei
))
4907 node
= lsei_cgraph_node (lsei
);
4908 if (node
->has_gimple_body_p ()
4909 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4913 streamer_write_uhwi (ob
, count
);
/* Second pass streams each node's replacement chain.  */
4915 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4916 lsei_next_function_in_partition (&lsei
))
4918 node
= lsei_cgraph_node (lsei
);
4919 if (node
->has_gimple_body_p ()
4920 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4921 write_agg_replacement_chain (ob
, node
);
/* Terminate the stream and emit the section.  */
4923 streamer_write_char_stream (ob
->main_stream
, 0);
4924 produce_asm (ob
, NULL
);
4925 destroy_output_block (ob
);
4928 /* Read replacements section in file FILE_DATA of length LEN with data
4932 read_replacements_section (struct lto_file_decl_data
*file_data
,
4936 const struct lto_function_header
*header
=
4937 (const struct lto_function_header
*) data
;
4938 const int cfg_offset
= sizeof (struct lto_function_header
);
4939 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4940 const int string_offset
= main_offset
+ header
->main_size
;
4941 struct data_in
*data_in
;
4945 lto_input_block
ib_main ((const char *) data
+ main_offset
,
4948 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4949 header
->string_size
, vNULL
);
4950 count
= streamer_read_uhwi (&ib_main
);
4952 for (i
= 0; i
< count
; i
++)
4955 struct cgraph_node
*node
;
4956 lto_symtab_encoder_t encoder
;
4958 index
= streamer_read_uhwi (&ib_main
);
4959 encoder
= file_data
->symtab_node_encoder
;
4960 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
4962 gcc_assert (node
->definition
);
4963 read_agg_replacement_chain (&ib_main
, node
, data_in
);
4965 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4967 lto_data_in_delete (data_in
);
4970 /* Read IPA-CP aggregate replacements. */
/* LTO read hook: walks all file decl data, fetches each file's
   LTO_section_ipcp_transform section (if present) and hands it to
   read_replacements_section.  */
4973 ipa_prop_read_all_agg_replacement (void)
4975 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4976 struct lto_file_decl_data
*file_data
;
/* file_data_vec is NULL-terminated.  */
4979 while ((file_data
= file_data_vec
[j
++]))
4982 const char *data
= lto_get_section_data (file_data
,
4983 LTO_section_ipcp_transform
,
4986 read_replacements_section (file_data
, data
, len
);
4990 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
/* When NODE is a clone with combined_args_to_skip set, the parameter
   indices recorded in AGGVAL refer to the original decl; build an
   old-index -> new-index map and rewrite each entry's index in place.
   No-op when no arguments were skipped.  */
4994 adjust_agg_replacement_values (struct cgraph_node
*node
,
4995 struct ipa_agg_replacement_value
*aggval
)
4997 struct ipa_agg_replacement_value
*v
;
4998 int i
, c
= 0, d
= 0, *adj
;
5000 if (!node
->clone
.combined_args_to_skip
)
/* Find the largest parameter index used by any replacement.  */
5003 for (v
= aggval
; v
; v
= v
->next
)
5005 gcc_assert (v
->index
>= 0);
/* Build the remapping table on the stack; skipped parameters get no
   new slot.  */
5011 adj
= XALLOCAVEC (int, c
);
5012 for (i
= 0; i
< c
; i
++)
5013 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
/* Apply the remapping to every entry in the chain.  */
5021 for (v
= aggval
; v
; v
= v
->next
)
5022 v
->index
= adj
[v
->index
];
5025 /* Dominator walker driving the ipcp modification phase. */
/* Carries the function-body analysis info, parameter descriptors and
   the aggregate replacement chain down the dominator tree;
   before_dom_children does the actual statement rewriting.  The two
   bool pointers report back whether anything was changed and whether
   the CFG was altered.  */
5027 class ipcp_modif_dom_walker
: public dom_walker
5030 ipcp_modif_dom_walker (struct func_body_info
*fbi
,
5031 vec
<ipa_param_descriptor
> descs
,
5032 struct ipa_agg_replacement_value
*av
,
5034 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
), m_descriptors (descs
),
5035 m_aggval (av
), m_something_changed (sc
), m_cfg_changed (cc
) {}
5037 virtual void before_dom_children (basic_block
);
/* Function-body analysis context (BB infos, parameter count).  */
5040 struct func_body_info
*m_fbi
;
/* Descriptors of the formal parameters of the current function.  */
5041 vec
<ipa_param_descriptor
> m_descriptors
;
/* Chain of known aggregate constants for this function.  */
5042 struct ipa_agg_replacement_value
*m_aggval
;
/* Out-flags owned by the caller (ipcp_transform_function).  */
5043 bool *m_something_changed
, *m_cfg_changed
;
/* Scan every statement of BB and replace loads from parameter-passed
   aggregates with the constants recorded in m_aggval, when a matching
   replacement (same index, offset, by_ref flag and size) exists and
   the constant can be converted to the type of the load.  */
5047 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
5049 gimple_stmt_iterator gsi
;
5050 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5052 struct ipa_agg_replacement_value
*v
;
5053 gimple stmt
= gsi_stmt (gsi
);
5055 HOST_WIDE_INT offset
, size
;
/* Only plain loads of register-type values are candidates.  */
5059 if (!gimple_assign_load_p (stmt
))
5061 rhs
= gimple_assign_rhs1 (stmt
);
5062 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
5067 while (handled_component_p (t
))
5069 /* V_C_E can do things like convert an array of integers to one
5070 bigger integer and similar things we do not handle below. */
5071 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
5076 t
= TREE_OPERAND (t
, 0);
/* Identify which parameter aggregate (if any) the load reads, and at
   what offset/size.  */
5081 if (!ipa_load_from_parm_agg_1 (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
5082 &offset
, &size
, &by_ref
))
/* Look for a replacement entry matching the parameter index and the
   access offset; it must also agree on by_ref and access size.  */
5084 for (v
= m_aggval
; v
; v
= v
->next
)
5085 if (v
->index
== index
5086 && v
->offset
== offset
)
5089 || v
->by_ref
!= by_ref
5090 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v
->value
))) != size
)
5093 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
/* Convert the constant to the type of the load: NOP conversion if
   possible, V_C_E if only sizes match, otherwise give up (with a
   dump note).  */
5094 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
5096 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
5097 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
5098 else if (TYPE_SIZE (TREE_TYPE (rhs
))
5099 == TYPE_SIZE (TREE_TYPE (v
->value
)))
5100 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
5105 fprintf (dump_file
, " const ");
5106 print_generic_expr (dump_file
, v
->value
, 0);
5107 fprintf (dump_file
, " can't be converted to type of ");
5108 print_generic_expr (dump_file
, rhs
, 0);
5109 fprintf (dump_file
, "\n");
5117 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5119 fprintf (dump_file
, "Modifying stmt:\n ");
5120 print_gimple_stmt (dump_file
, stmt
, 0, 0);
/* Perform the actual replacement of the load by the constant.  */
5122 gimple_assign_set_rhs_from_tree (&gsi
, val
);
5125 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5127 fprintf (dump_file
, "into:\n ");
5128 print_gimple_stmt (dump_file
, stmt
, 0, 0);
5129 fprintf (dump_file
, "\n");
5132 *m_something_changed
= true;
/* Replacing a load by a constant may make an EH edge dead.  */
5133 if (maybe_clean_eh_stmt (stmt
)
5134 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
5135 *m_cfg_changed
= true;
5140 /* IPCP transformation phase doing propagation of aggregate values. */
/* Entry point of the IPCP transform for NODE (whose body is cfun):
   fetches the aggregate replacements computed for NODE, remaps their
   parameter indices for clones, walks the dominator tree rewriting
   matching loads, then releases all per-function data.  Returns a
   TODO mask describing the required cleanups.  */
5143 ipcp_transform_function (struct cgraph_node
*node
)
5145 vec
<ipa_param_descriptor
> descriptors
= vNULL
;
5146 struct func_body_info fbi
;
5147 struct ipa_agg_replacement_value
*aggval
;
5149 bool cfg_changed
= false, something_changed
= false;
5151 gcc_checking_assert (cfun
);
5152 gcc_checking_assert (current_function_decl
);
5155 fprintf (dump_file
, "Modification phase of node %s/%i\n",
5156 node
->name (), node
->order
);
5158 aggval
= ipa_get_agg_replacements_for_node (node
);
5161 param_count
= count_formal_params (node
->decl
);
5162 if (param_count
== 0)
/* Indices in AGGVAL refer to the original decl; remap them if this is
   a clone with skipped arguments.  */
5164 adjust_agg_replacement_values (node
, aggval
);
5166 ipa_dump_agg_replacement_values (dump_file
, aggval
);
/* Set up the minimal func_body_info the dominator walker needs.  */
5170 fbi
.bb_infos
= vNULL
;
5171 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
5172 fbi
.param_count
= param_count
;
5175 descriptors
.safe_grow_cleared (param_count
);
5176 ipa_populate_param_decls (node
, descriptors
);
5177 calculate_dominance_info (CDI_DOMINATORS
);
/* Do the statement rewriting over the whole function.  */
5178 ipcp_modif_dom_walker (&fbi
, descriptors
, aggval
, &something_changed
,
5179 &cfg_changed
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
/* Release per-BB info, dominance info, the consumed replacement chain
   and the parameter descriptors.  */
5182 struct ipa_bb_info
*bi
;
5183 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
5184 free_ipa_bb_info (bi
);
5185 fbi
.bb_infos
.release ();
5186 free_dominance_info (CDI_DOMINATORS
);
5187 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
5188 descriptors
.release ();
5190 if (!something_changed
)
5192 else if (cfg_changed
)
5193 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
5195 return TODO_update_ssa_only_virtuals
;