1 /* Data structure for the modref pass.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* modref_tree represents a decision tree that can be used by the alias
22 analysis oracle to determine whether a given memory access can be affected
23 by a function call. For every function we collect two trees, one for loads
24 and the other for stores. The tree consists of the following levels:
26 1) Base: this level represents the base alias set of the access and refers
27 to sons (ref nodes). The flag all_refs means that all possible references
30 Because for LTO streaming we need to stream types rather than alias sets
31 modref_base_node is implemented as a template.
32 2) Ref: this level represents the ref alias set and links to accesses unless
34 Again, ref is a template to allow LTO streaming.
35 3) Access: this level represents info about individual accesses. Presently
36 we record whether the access is through a dereference of a function
37 parameter and if so we record the access range.
40 #ifndef GCC_MODREF_TREE_H
41 #define GCC_MODREF_TREE_H
/* Forward declaration only; the full definition is provided elsewhere
   (presumably in the modref pass implementation — not visible here).  */
43 struct ipa_modref_summary
;
/* Parm indexes greater than 0 are normal parms.
   Some negative values have special meaning.  */
enum modref_special_parms
{
  /* Base of the access is not known to be a parameter.  */
  MODREF_UNKNOWN_PARM = -1,
  /* Access is relative to the static chain of the function.  */
  MODREF_STATIC_CHAIN_PARM = -2,
  /* Access is relative to the return slot.  */
  MODREF_RETSLOT_PARM = -3,
  /* Used in modref_parm_map to take references which can be removed
     from the summary during summary update since they now point to local
     memory.  */
  MODREF_LOCAL_MEMORY_PARM = -4
};
/* NOTE(review): this region is garbled by extraction — original line numbers
   are fused into the code, statements are split across lines, and several
   lines (struct braces, some field declarations) appear to be missing.
   Restore from the upstream file before compiling.  */
57 /* Modref record accesses relative to function parameters.
58 This is entry for single access specifying its base and access range.
60 Accesses can be collected to boundedly sized arrays using
61 modref_access_node::insert. */
62 struct GTY(()) modref_access_node
64 /* Access range information (in bits). */
/* NOTE(review): the declarations of the range fields themselves appear to
   have been dropped here; useful_for_kill_p below reads `size' and
   `max_size', and the aggregate initializer of
   unspecified_modref_access_node has seven members.  TODO: confirm against
   upstream.  */
69 /* Offset from parameter pointer to the base of the access (in bytes). */
70 poly_int64 parm_offset
;
72 /* Index of parameter which specifies the base of access. -1 if base is not
73 a function parameter. */
/* NOTE(review): the parm_index field declaration itself appears missing;
   useful_p () below reads parm_index.  */
75 bool parm_offset_known
;
76 /* Number of times interval was extended during dataflow.
77 This has to be limited in order to keep dataflow finite. */
78 unsigned char adjustments
;
80 /* Return true if access node holds some useful info. */
81 bool useful_p () const
83 return parm_index
!= MODREF_UNKNOWN_PARM
;
85 /* Return true if access can be used to determine a kill. */
86 bool useful_for_kill_p () const
88 return parm_offset_known
&& parm_index
!= MODREF_UNKNOWN_PARM
89 && parm_index
!= MODREF_RETSLOT_PARM
&& known_size_p (size
)
90 && known_eq (max_size
, size
);
92 /* Dump range to debug OUT. */
93 void dump (FILE *out
);
94 /* Return true if both accesses are the same. */
95 bool operator == (modref_access_node
&a
) const;
96 /* Return true if range info is useful. */
97 bool range_info_useful_p () const;
98 /* Return tree corresponding to parameter of the range in STMT. */
99 tree
get_call_arg (const gcall
*stmt
) const;
100 /* Build ao_ref corresponding to the access and return true if succesful. */
101 bool get_ao_ref (const gcall
*stmt
, class ao_ref
*ref
) const;
102 /* Stream access to OB. */
103 void stream_out (struct output_block
*ob
) const;
104 /* Stream access in from IB. */
105 static modref_access_node
stream_in (struct lto_input_block
*ib
);
106 /* Insert A into vector ACCESSES. Limit size of vector to MAX_ACCESSES and
107 if RECORD_ADJUSTMENT is true keep track of adjustment counts.
108 Return 0 if nothing changed, 1 is insertion suceeded and -1 if failed. */
109 static int insert (vec
<modref_access_node
, va_gc
> *&accesses
,
110 modref_access_node a
, size_t max_accesses
,
111 bool record_adjustments
);
112 /* Same as insert but for kills where we are conservative the other way
113 around: if information is lost, the kill is lost. */
114 static bool insert_kill (vec
<modref_access_node
> &kills
,
115 modref_access_node
&a
, bool record_adjustments
);
/* NOTE(review): the helpers below look like the private merging machinery
   used by insert/insert_kill; an access-specifier line may have been
   dropped here — confirm against upstream.  */
117 bool contains (const modref_access_node
&) const;
118 bool contains_for_kills (const modref_access_node
&) const;
119 void update (poly_int64
, poly_int64
, poly_int64
, poly_int64
, bool);
120 bool update_for_kills (poly_int64
, poly_int64
, poly_int64
,
121 poly_int64
, poly_int64
, bool);
122 bool merge (const modref_access_node
&, bool);
123 bool merge_for_kills (const modref_access_node
&, bool);
124 static bool closer_pair_p (const modref_access_node
&,
125 const modref_access_node
&,
126 const modref_access_node
&,
127 const modref_access_node
&);
128 void forced_merge (const modref_access_node
&, bool);
129 void update2 (poly_int64
, poly_int64
, poly_int64
, poly_int64
,
130 poly_int64
, poly_int64
, poly_int64
, bool);
131 bool combined_offsets (const modref_access_node
&,
132 poly_int64
*, poly_int64
*, poly_int64
*) const;
133 static void try_merge_with (vec
<modref_access_node
, va_gc
> *&, size_t);
136 /* Access node specifying no useful info. */
/* NOTE(review): initializer has seven members; presumably
   {offset, size, max_size, parm_offset, parm_index, parm_offset_known,
   adjustments} — the leading field declarations are not visible in this
   garbled copy, so confirm the order against upstream.  */
137 const modref_access_node unspecified_modref_access_node
138 = {0, -1, -1, 0, MODREF_UNKNOWN_PARM
, false, 0};
/* NOTE(review): struct body is garbled and incomplete — braces, the `ref'
   and `every_access' field declarations, and most of the insert_access body
   are missing from this extraction.  Restore from upstream.  */
140 template <typename T
>
141 struct GTY((user
)) modref_ref_node
145 vec
<modref_access_node
, va_gc
> *accesses
;
147 modref_ref_node (T ref
):
149 every_access (false),
153 /* Collapse the tree. */
161 /* Insert access with OFFSET and SIZE.
162 Collapse tree if it has more than MAX_ACCESSES entries.
163 If RECORD_ADJUSTMENTs is true avoid too many interval extensions.
164 Return true if record was changed. */
165 bool insert_access (modref_access_node a
, size_t max_accesses
,
166 bool record_adjustments
)
168 /* If this base->ref pair has no access information, bail out. */
172 /* Only the following kind of paramters needs to be tracked.
173 We do not track return slots because they are seen as a direct store
175 gcc_checking_assert (a
.parm_index
>= 0
176 || a
.parm_index
== MODREF_STATIC_CHAIN_PARM
177 || a
.parm_index
== MODREF_UNKNOWN_PARM
);
189 int ret
= modref_access_node::insert (accesses
, a
, max_accesses
,
195 "--param param=modref-max-accesses limit reached;"
/* NOTE(review): struct body is garbled and incomplete — braces, the `base'
   and `every_ref' fields, loop indices, and parts of search/insert_ref and
   the collapse routine are missing from this extraction.  */
203 /* Base of an access. */
204 template <typename T
>
205 struct GTY((user
)) modref_base_node
208 vec
<modref_ref_node
<T
> *, va_gc
> *refs
;
211 modref_base_node (T base
):
216 /* Search REF; return NULL if failed. */
217 modref_ref_node
<T
> *search (T ref
)
220 modref_ref_node
<T
> *n
;
221 FOR_EACH_VEC_SAFE_ELT (refs
, i
, n
)
227 /* Insert REF; collapse tree if there are more than MAX_REFS.
228 Return inserted ref and if CHANGED is non-null set it to true if
229 something changed. */
230 modref_ref_node
<T
> *insert_ref (T ref
, size_t max_refs
,
231 bool *changed
= NULL
)
233 modref_ref_node
<T
> *ref_node
;
235 /* If the node is collapsed, don't do anything. */
239 /* Otherwise, insert a node for the ref of the access under the base. */
240 ref_node
= search (ref
);
244 /* We always allow inserting ref 0. For non-0 refs there is upper
245 limit on number of entries and if exceeded,
246 drop ref conservatively to 0. */
247 if (ref
&& refs
&& refs
->length () >= max_refs
)
250 fprintf (dump_file
, "--param param=modref-max-refs limit reached;"
253 ref_node
= search (ref
);
261 ref_node
= new (ggc_alloc
<modref_ref_node
<T
> > ())modref_ref_node
<T
>
263 vec_safe_push (refs
, ref_node
);
/* NOTE(review): the fragment below (iterating refs with `r') looks like the
   tail of a collapse/free routine whose header was dropped.  */
270 modref_ref_node
<T
> *r
;
274 FOR_EACH_VEC_SAFE_ELT (refs
, i
, r
)
286 /* Map translating parameters across function call. */
288 struct modref_parm_map
290 /* Index of parameter we translate to.
291 Values from special_params enum are permitted too. */
293 bool parm_offset_known
;
294 poly_int64 parm_offset
;
/* NOTE(review): struct header garbled — opening brace, the max_bases /
   max_refs / max_accesses / every_base field declarations, and one
   constructor initializer line (max_refs) are missing from this
   extraction.  */
297 /* Access tree for a single function. */
298 template <typename T
>
299 struct GTY((user
)) modref_tree
301 vec
<modref_base_node
<T
> *, va_gc
> *bases
;
307 modref_tree (size_t max_bases
, size_t max_refs
, size_t max_accesses
):
309 max_bases (max_bases
),
311 max_accesses (max_accesses
),
312 every_base (false) {}
/* Inserts (or finds) the base node for BASE; on overflow falls back to REF
   and then to 0, as the comments below describe.  NOTE(review): body is
   garbled — braces, early returns and the dump_file guards are missing
   from this extraction.  */
314 /* Insert BASE; collapse tree if there are more than MAX_REFS.
315 Return inserted base and if CHANGED is non-null set it to true if
317 If table gets full, try to insert REF instead. */
319 modref_base_node
<T
> *insert_base (T base
, T ref
, bool *changed
= NULL
)
321 modref_base_node
<T
> *base_node
;
323 /* If the node is collapsed, don't do anything. */
327 /* Otherwise, insert a node for the base of the access into the tree. */
328 base_node
= search (base
);
332 /* We always allow inserting base 0. For non-0 base there is upper
333 limit on number of entries and if exceeded,
334 drop base conservatively to ref and if it still does not fit to 0. */
335 if (base
&& bases
&& bases
->length () >= max_bases
)
337 base_node
= search (ref
);
341 fprintf (dump_file
, "--param param=modref-max-bases"
342 " limit reached; using ref\n");
346 fprintf (dump_file
, "--param param=modref-max-bases"
347 " limit reached; using 0\n");
349 base_node
= search (base
);
357 base_node
= new (ggc_alloc
<modref_base_node
<T
> > ())
358 modref_base_node
<T
> (base
);
359 vec_safe_push (bases
, base_node
);
/* Records one access (BASE alias set, REF alias set, access node A) in the
   tree, collapsing levels when no useful information can be kept.
   NOTE(review): body is garbled — braces, returns, and the dump_file
   guards around the diagnostic strings are missing from this
   extraction.  */
363 /* Insert memory access to the tree.
364 Return true if something changed. */
365 bool insert (T base
, T ref
, modref_access_node a
,
366 bool record_adjustments
)
371 bool changed
= false;
373 /* We may end up with max_size being less than size for accesses past the
374 end of array. Those are undefined and safe to ignore. */
375 if (a
.range_info_useful_p ()
376 && known_size_p (a
.size
) && known_size_p (a
.max_size
)
377 && known_lt (a
.max_size
, a
.size
))
381 " - Paradoxical range. Ignoring\n");
384 if (known_size_p (a
.size
)
385 && known_eq (a
.size
, 0))
389 " - Zero size. Ignoring\n");
392 if (known_size_p (a
.max_size
)
393 && known_eq (a
.max_size
, 0))
397 " - Zero max_size. Ignoring\n");
400 gcc_checking_assert (!known_size_p (a
.max_size
)
401 || !known_le (a
.max_size
, 0));
403 /* No useful information tracked; collapse everything. */
404 if (!base
&& !ref
&& !a
.useful_p ())
410 modref_base_node
<T
> *base_node
= insert_base (base
, ref
, &changed
);
411 base
= base_node
->base
;
412 /* If table got full we may end up with useless base. */
413 if (!base
&& !ref
&& !a
.useful_p ())
418 if (base_node
->every_ref
)
420 gcc_checking_assert (search (base
) != NULL
);
422 /* No useful ref info tracked; collapse base. */
423 if (!ref
&& !a
.useful_p ())
425 base_node
->collapse ();
429 modref_ref_node
<T
> *ref_node
= base_node
->insert_ref (ref
, max_refs
,
433 if (ref_node
->every_access
)
435 changed
|= ref_node
->insert_access (a
, max_accesses
,
437 /* See if we failed to add useful access. */
438 if (ref_node
->every_access
)
440 /* Collapse everything if there is no useful base and ref. */
444 gcc_checking_assert (changed
);
446 /* Collapse base if there is no useful ref. */
449 base_node
->collapse ();
450 gcc_checking_assert (changed
);
/* Prunes empty ref nodes and then empty base nodes, freeing them via
   ggc_delete.  NOTE(review): the function signature line itself appears to
   have been dropped by extraction (upstream presumably names this
   `cleanup') — confirm against the original file.  Loop-index declarations
   and braces are also missing.  */
456 /* Remove tree branches that are not useful (i.e. they will always pass). */
461 modref_base_node
<T
> *base_node
;
462 modref_ref_node
<T
> *ref_node
;
467 for (i
= 0; vec_safe_iterate (bases
, i
, &base_node
);)
470 for (j
= 0; vec_safe_iterate (base_node
->refs
, j
, &ref_node
);)
472 if (!ref_node
->every_access
473 && (!ref_node
->accesses
474 || !ref_node
->accesses
->length ()))
476 base_node
->refs
->unordered_remove (j
);
477 vec_free (ref_node
->accesses
);
478 ggc_delete (ref_node
);
483 if (!base_node
->every_ref
484 && (!base_node
->refs
|| !base_node
->refs
->length ()))
486 bases
->unordered_remove (i
);
487 vec_free (base_node
->refs
);
488 ggc_delete (base_node
);
493 if (bases
&& !bases
->length ())
/* Merges the summary OTHER into this tree, translating parameter indexes
   through PARM_MAP / STATIC_CHAIN_MAP.  NOTE(review): body is garbled —
   braces, returns, the `release' cleanup at the end and parts of the
   parameter-translation logic are missing from this extraction.  */
500 /* Merge OTHER into the tree.
501 PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
502 Similar CHAIN_MAP, if non-NULL, maps static chain of callee to caller.
503 Return true if something has changed. */
504 bool merge (modref_tree
<T
> *other
, vec
<modref_parm_map
> *parm_map
,
505 modref_parm_map
*static_chain_map
,
506 bool record_accesses
)
508 if (!other
|| every_base
)
510 if (other
->every_base
)
516 bool changed
= false;
518 modref_base_node
<T
> *base_node
, *my_base_node
;
519 modref_ref_node
<T
> *ref_node
;
520 modref_access_node
*access_node
;
521 bool release
= false;
523 /* For self-recursive functions we may end up merging summary into itself;
524 produce copy first so we do not modify summary under our own hands. */
528 other
= modref_tree
<T
>::create_ggc (max_bases
, max_refs
, max_accesses
);
529 other
->copy_from (this);
532 FOR_EACH_VEC_SAFE_ELT (other
->bases
, i
, base_node
)
534 if (base_node
->every_ref
)
536 my_base_node
= insert_base (base_node
->base
, 0, &changed
);
537 if (my_base_node
&& !my_base_node
->every_ref
)
539 my_base_node
->collapse ();
545 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
547 if (ref_node
->every_access
)
549 changed
|= insert (base_node
->base
,
551 unspecified_modref_access_node
,
555 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
557 modref_access_node a
= *access_node
;
559 if (a
.parm_index
!= MODREF_UNKNOWN_PARM
&& parm_map
)
561 if (a
.parm_index
>= (int)parm_map
->length ())
562 a
.parm_index
= MODREF_UNKNOWN_PARM
;
/* NOTE(review): `m' is selected from static_chain_map for
   MODREF_STATIC_CHAIN_PARM, else from parm_map; its declaration line is
   missing here.  */
566 = a
.parm_index
== MODREF_STATIC_CHAIN_PARM
568 : (*parm_map
) [a
.parm_index
];
569 if (m
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
571 a
.parm_offset
+= m
.parm_offset
;
572 a
.parm_offset_known
&= m
.parm_offset_known
;
573 a
.parm_index
= m
.parm_index
;
576 changed
|= insert (base_node
->base
, ref_node
->ref
, a
,
/* Copies OTHER into this tree by merging with identity parameter maps
   (NULL maps, no access recording).  */
586 /* Copy OTHER to THIS. */
587 void copy_from (modref_tree
<T
> *other
)
589 merge (other
, NULL
, NULL
, false);
/* Linear search over the bases vector.  NOTE(review): loop body, the
   index declaration and the return statements are missing from this
   extraction.  */
592 /* Search BASE in tree; return NULL if failed. */
593 modref_base_node
<T
> *search (T base
)
596 modref_base_node
<T
> *n
;
597 FOR_EACH_VEC_SAFE_ELT (bases
, i
, n
)
/* Walks all bases/refs/accesses looking for one whose parameter index is
   unknown (i.e. possibly global memory).  NOTE(review): braces, the
   every_base check and the return statements are missing from this
   extraction.  */
603 /* Return true if tree contains access to global memory. */
604 bool global_access_p ()
607 modref_base_node
<T
> *base_node
;
608 modref_ref_node
<T
> *ref_node
;
609 modref_access_node
*access_node
;
612 FOR_EACH_VEC_SAFE_ELT (bases
, i
, base_node
)
614 if (base_node
->every_ref
)
616 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
618 if (ref_node
->every_access
)
620 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
621 if (access_node
->parm_index
== MODREF_UNKNOWN_PARM
)
/* Factory: placement-new a modref_tree into GC storage without
   registering a finalizer (destruction happens explicitly via
   ggc_delete).  NOTE(review): the parameter line carrying `max_accesses'
   is missing from this extraction.  */
628 /* Return ggc allocated instance. We explicitly call destructors via
629 ggc_delete and do not want finalizers to be registered and
630 called at the garbage collection time. */
631 static modref_tree
<T
> *create_ggc (size_t max_bases
, size_t max_refs
,
634 return new (ggc_alloc_no_dtor
<modref_tree
<T
>> ())
635 modref_tree
<T
> (max_bases
, max_refs
, max_accesses
);
/* NOTE(review): the function signature appears dropped by extraction
   (presumably `void collapse ()' — insert () above calls collapse on base
   nodes; confirm the tree-level variant against upstream).  Only the
   iteration skeleton over bases survives here.  */
638 /* Remove all records and mark tree to alias with everything. */
642 modref_base_node
<T
> *n
;
646 FOR_EACH_VEC_SAFE_ELT (bases
, i
, n
)
/* NOTE(review): a whole member function under the `Release memory'
   comment appears dropped by extraction.  */
657 /* Release memory. */
/* Rewrites every access' parm_index through MAP; indexes past the end of
   MAP become MODREF_UNKNOWN_PARM.  NOTE(review): the return-type line of
   this function and the loop braces are missing from this extraction.  */
663 /* Update parameter indexes in TT according to MAP. */
665 remap_params (vec
<int> *map
)
668 modref_base_node
<T
> *base_node
;
669 FOR_EACH_VEC_SAFE_ELT (bases
, i
, base_node
)
672 modref_ref_node
<T
> *ref_node
;
673 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
676 modref_access_node
*access_node
;
677 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
678 if (access_node
->parm_index
>= 0)
680 if (access_node
->parm_index
< (int)map
->length ())
681 access_node
->parm_index
= (*map
)[access_node
->parm_index
];
683 access_node
->parm_index
= MODREF_UNKNOWN_PARM
;
/* Self-test entry point plus GTY garbage-collection / PCH marker
   declarations for the user-marked template instantiations above
   (modref_tree/modref_base_node/modref_ref_node over int and tree_node*).
   NOTE(review): several declarations below are missing their trailing
   `void *cookie);' continuation lines — dropped by extraction.  */
690 void modref_c_tests ();
692 void gt_ggc_mx (modref_tree
<int>* const&);
693 void gt_ggc_mx (modref_tree
<tree_node
*>* const&);
694 void gt_pch_nx (modref_tree
<int>* const&);
695 void gt_pch_nx (modref_tree
<tree_node
*>* const&);
696 void gt_pch_nx (modref_tree
<int>* const&, gt_pointer_operator op
, void *cookie
);
697 void gt_pch_nx (modref_tree
<tree_node
*>* const&, gt_pointer_operator op
,
700 void gt_ggc_mx (modref_base_node
<int>*);
701 void gt_ggc_mx (modref_base_node
<tree_node
*>* &);
702 void gt_pch_nx (modref_base_node
<int>* const&);
703 void gt_pch_nx (modref_base_node
<tree_node
*>* const&);
704 void gt_pch_nx (modref_base_node
<int>* const&, gt_pointer_operator op
,
706 void gt_pch_nx (modref_base_node
<tree_node
*>* const&, gt_pointer_operator op
,
709 void gt_ggc_mx (modref_ref_node
<int>*);
710 void gt_ggc_mx (modref_ref_node
<tree_node
*>* &);
711 void gt_pch_nx (modref_ref_node
<int>* const&);
712 void gt_pch_nx (modref_ref_node
<tree_node
*>* const&);
713 void gt_pch_nx (modref_ref_node
<int>* const&, gt_pointer_operator op
,
715 void gt_pch_nx (modref_ref_node
<tree_node
*>* const&, gt_pointer_operator op
,