/* Provenance: gitweb blob view from git.ipfire.org mirror of
   thirdparty/gcc.git, file gcc/ipa-modref-tree.h
   (commit subject: x86: Remove "%!" before ret).  */
1 /* Data structure for the modref pass.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* modref_tree represent a decision tree that can be used by alias analysis
22 oracle to determine whether given memory access can be affected by a function
23 call. For every function we collect two trees, one for loads and other
24 for stores. Tree consist of following levels:
25
26 1) Base: this level represent base alias set of the access and refers
27 to sons (ref nodes). Flag all_refs means that all possible references
28 are aliasing.
29
30 Because for LTO streaming we need to stream types rather than alias sets
31 modref_base_node is implemented as a template.
32 2) Ref: this level represent ref alias set and links to accesses unless
33 all_refs flag is set.
34 Again ref is an template to allow LTO streaming.
35 3) Access: this level represent info about individual accesses. Presently
36 we record whether access is through a dereference of a function parameter
37 and if so we record the access range.
38 */
39
40 #ifndef GCC_MODREF_TREE_H
41 #define GCC_MODREF_TREE_H
42
43 struct ipa_modref_summary;
44
/* Parm indexes greater than 0 are normal parms.
   Some negative values have special meaning.  */
enum modref_special_parms {
  /* Base of the access is not (known to be) a function parameter.  */
  MODREF_UNKNOWN_PARM = -1,
  /* Access is based on the static chain of a nested function.  */
  MODREF_STATIC_CHAIN_PARM = -2,
  /* Access is based on the return slot (NRV / sret) pointer.  */
  MODREF_RETSLOT_PARM = -3,
  /* Used in modref_parm_map to mark references which can be removed
     from the summary during summary update since they now point to local
     memory.  */
  MODREF_LOCAL_MEMORY_PARM = -4
};
56
/* Modref records accesses relative to function parameters.
   This is an entry for a single access specifying its base and access range.

   Accesses can be collected to boundedly sized arrays using
   modref_access_node::insert.  */
struct GTY(()) modref_access_node
{
  /* Access range information (in bits).  MAX_SIZE is an upper bound
     on SIZE; when equal to SIZE the range is exact.  */
  poly_int64 offset;
  poly_int64 size;
  poly_int64 max_size;

  /* Offset from parameter pointer to the base of the access (in bytes).
     Only meaningful when parm_offset_known is true.  */
  poly_int64 parm_offset;

  /* Index of parameter which specifies the base of access. -1 if base is not
     a function parameter.  Negative values are from modref_special_parms.  */
  int parm_index;
  bool parm_offset_known;
  /* Number of times interval was extended during dataflow.
     This has to be limited in order to keep dataflow finite.  */
  unsigned char adjustments;

  /* Return true if access node holds some useful info.  */
  bool useful_p () const
    {
      return parm_index != MODREF_UNKNOWN_PARM;
    }
  /* Return true if access can be used to determine a kill.
     Kills need a known parameter base with known offset and an exact
     (size == max_size) range; return-slot stores are excluded.  */
  bool useful_for_kill_p () const
    {
      return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM
	     && parm_index != MODREF_RETSLOT_PARM && known_size_p (size)
	     && known_eq (max_size, size);
    }
  /* Dump range to debug OUT.  */
  void dump (FILE *out);
  /* Return true if both accesses are the same.  */
  bool operator == (modref_access_node &a) const;
  /* Return true if range info is useful.  */
  bool range_info_useful_p () const;
  /* Return tree corresponding to parameter of the range in STMT.  */
  tree get_call_arg (const gcall *stmt) const;
  /* Build ao_ref corresponding to the access and return true if successful.  */
  bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const;
  /* Stream access to OB.  */
  void stream_out (struct output_block *ob) const;
  /* Stream access in from IB.  */
  static modref_access_node stream_in (struct lto_input_block *ib);
  /* Insert A into vector ACCESSES.  Limit size of vector to MAX_ACCESSES and
     if RECORD_ADJUSTMENT is true keep track of adjustment counts.
     Return 0 if nothing changed, 1 if insertion succeeded and -1 if failed.  */
  static int insert (vec <modref_access_node, va_gc> *&accesses,
		     modref_access_node a, size_t max_accesses,
		     bool record_adjustments);
  /* Same as insert but for kills where we are conservative the other way
     around: if information is lost, the kill is lost.  */
  static bool insert_kill (vec<modref_access_node> &kills,
			   modref_access_node &a, bool record_adjustments);
private:
  /* Internal helpers; definitions live in ipa-modref-tree.cc.  */
  bool contains (const modref_access_node &) const;
  bool contains_for_kills (const modref_access_node &) const;
  void update (poly_int64, poly_int64, poly_int64, poly_int64, bool);
  bool update_for_kills (poly_int64, poly_int64, poly_int64,
			 poly_int64, poly_int64, bool);
  bool merge (const modref_access_node &, bool);
  bool merge_for_kills (const modref_access_node &, bool);
  static bool closer_pair_p (const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &);
  void forced_merge (const modref_access_node &, bool);
  void update2 (poly_int64, poly_int64, poly_int64, poly_int64,
		poly_int64, poly_int64, poly_int64, bool);
  bool combined_offsets (const modref_access_node &,
			 poly_int64 *, poly_int64 *, poly_int64 *) const;
  static void try_merge_with (vec <modref_access_node, va_gc> *&, size_t);
};
135
/* Access node specifying no useful info.  Initializers are, in field
   order: offset, size, max_size, parm_offset, parm_index,
   parm_offset_known, adjustments.  */
const modref_access_node unspecified_modref_access_node
  = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0};
139
140 template <typename T>
141 struct GTY((user)) modref_ref_node
142 {
143 T ref;
144 bool every_access;
145 vec <modref_access_node, va_gc> *accesses;
146
147 modref_ref_node (T ref):
148 ref (ref),
149 every_access (false),
150 accesses (NULL)
151 {}
152
153 /* Collapse the tree. */
154 void collapse ()
155 {
156 vec_free (accesses);
157 accesses = NULL;
158 every_access = true;
159 }
160
161 /* Insert access with OFFSET and SIZE.
162 Collapse tree if it has more than MAX_ACCESSES entries.
163 If RECORD_ADJUSTMENTs is true avoid too many interval extensions.
164 Return true if record was changed. */
165 bool insert_access (modref_access_node a, size_t max_accesses,
166 bool record_adjustments)
167 {
168 /* If this base->ref pair has no access information, bail out. */
169 if (every_access)
170 return false;
171
172 /* Only the following kind of paramters needs to be tracked.
173 We do not track return slots because they are seen as a direct store
174 in the caller. */
175 gcc_checking_assert (a.parm_index >= 0
176 || a.parm_index == MODREF_STATIC_CHAIN_PARM
177 || a.parm_index == MODREF_UNKNOWN_PARM);
178
179 if (!a.useful_p ())
180 {
181 if (!every_access)
182 {
183 collapse ();
184 return true;
185 }
186 return false;
187 }
188
189 int ret = modref_access_node::insert (accesses, a, max_accesses,
190 record_adjustments);
191 if (ret == -1)
192 {
193 if (dump_file)
194 fprintf (dump_file,
195 "--param param=modref-max-accesses limit reached;"
196 " collapsing\n");
197 collapse ();
198 }
199 return ret != 0;
200 }
201 };
202
203 /* Base of an access. */
204 template <typename T>
205 struct GTY((user)) modref_base_node
206 {
207 T base;
208 vec <modref_ref_node <T> *, va_gc> *refs;
209 bool every_ref;
210
211 modref_base_node (T base):
212 base (base),
213 refs (NULL),
214 every_ref (false) {}
215
216 /* Search REF; return NULL if failed. */
217 modref_ref_node <T> *search (T ref)
218 {
219 size_t i;
220 modref_ref_node <T> *n;
221 FOR_EACH_VEC_SAFE_ELT (refs, i, n)
222 if (n->ref == ref)
223 return n;
224 return NULL;
225 }
226
227 /* Insert REF; collapse tree if there are more than MAX_REFS.
228 Return inserted ref and if CHANGED is non-null set it to true if
229 something changed. */
230 modref_ref_node <T> *insert_ref (T ref, size_t max_refs,
231 bool *changed = NULL)
232 {
233 modref_ref_node <T> *ref_node;
234
235 /* If the node is collapsed, don't do anything. */
236 if (every_ref)
237 return NULL;
238
239 /* Otherwise, insert a node for the ref of the access under the base. */
240 ref_node = search (ref);
241 if (ref_node)
242 return ref_node;
243
244 /* We always allow inserting ref 0. For non-0 refs there is upper
245 limit on number of entries and if exceeded,
246 drop ref conservatively to 0. */
247 if (ref && refs && refs->length () >= max_refs)
248 {
249 if (dump_file)
250 fprintf (dump_file, "--param param=modref-max-refs limit reached;"
251 " using 0\n");
252 ref = 0;
253 ref_node = search (ref);
254 if (ref_node)
255 return ref_node;
256 }
257
258 if (changed)
259 *changed = true;
260
261 ref_node = new (ggc_alloc <modref_ref_node <T> > ())modref_ref_node <T>
262 (ref);
263 vec_safe_push (refs, ref_node);
264 return ref_node;
265 }
266
267 void collapse ()
268 {
269 size_t i;
270 modref_ref_node <T> *r;
271
272 if (refs)
273 {
274 FOR_EACH_VEC_SAFE_ELT (refs, i, r)
275 {
276 r->collapse ();
277 ggc_free (r);
278 }
279 vec_free (refs);
280 }
281 refs = NULL;
282 every_ref = true;
283 }
284 };
285
/* Map translating parameters across function call.  */

struct modref_parm_map
{
  /* Index of parameter we translate to.
     Values from the modref_special_parms enum are permitted too.  */
  int parm_index;
  /* True when PARM_OFFSET below is meaningful.  */
  bool parm_offset_known;
  /* Offset (in bytes) to add when translating to the caller's frame.  */
  poly_int64 parm_offset;
};
296
/* Access tree for a single function.  Two levels of nodes (base, ref)
   lead to access records; MAX_BASES/MAX_REFS/MAX_ACCESSES bound each
   level and, when exceeded, the affected subtree is conservatively
   collapsed.  */
template <typename T>
struct GTY((user)) modref_tree
{
  /* All base nodes; NULL until the first base is recorded.  */
  vec <modref_base_node <T> *, va_gc> *bases;
  size_t max_bases;
  size_t max_refs;
  size_t max_accesses;
  /* True when the whole tree was collapsed: everything may alias.  */
  bool every_base;

  modref_tree (size_t max_bases, size_t max_refs, size_t max_accesses):
    bases (NULL),
    max_bases (max_bases),
    max_refs (max_refs),
    max_accesses (max_accesses),
    every_base (false) {}

  /* Insert BASE; collapse tree if there are more than MAX_BASES.
     Return inserted base and if CHANGED is non-null set it to true if
     something changed.
     If table gets full, try to insert REF instead.  */

  modref_base_node <T> *insert_base (T base, T ref, bool *changed = NULL)
  {
    modref_base_node <T> *base_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_base)
      return NULL;

    /* Otherwise, insert a node for the base of the access into the tree.  */
    base_node = search (base);
    if (base_node)
      return base_node;

    /* We always allow inserting base 0.  For non-0 base there is upper
       limit on number of entries and if exceeded,
       drop base conservatively to ref and if it still does not fit to 0.  */
    if (base && bases && bases->length () >= max_bases)
      {
	base_node = search (ref);
	if (base_node)
	  {
	    if (dump_file)
	      fprintf (dump_file, "--param param=modref-max-bases"
		       " limit reached; using ref\n");
	    return base_node;
	  }
	if (dump_file)
	  fprintf (dump_file, "--param param=modref-max-bases"
		   " limit reached; using 0\n");
	base = 0;
	base_node = search (base);
	if (base_node)
	  return base_node;
      }

    if (changed)
      *changed = true;

    base_node = new (ggc_alloc <modref_base_node <T> > ())
		 modref_base_node <T> (base);
    vec_safe_push (bases, base_node);
    return base_node;
  }

  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (T base, T ref, modref_access_node a,
	       bool record_adjustments)
  {
    if (every_base)
      return false;

    bool changed = false;

    /* We may end up with max_size being less than size for accesses past the
       end of array.  Those are undefined and safe to ignore.  */
    if (a.range_info_useful_p ()
	&& known_size_p (a.size) && known_size_p (a.max_size)
	&& known_lt (a.max_size, a.size))
      {
	if (dump_file)
	  fprintf (dump_file,
		   " - Paradoxical range. Ignoring\n");
	return false;
      }
    /* Zero-sized accesses touch no memory; recording them would only
       pollute the tree.  */
    if (known_size_p (a.size)
	&& known_eq (a.size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   " - Zero size. Ignoring\n");
	return false;
      }
    if (known_size_p (a.max_size)
	&& known_eq (a.max_size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   " - Zero max_size. Ignoring\n");
	return false;
      }
    gcc_checking_assert (!known_size_p (a.max_size)
			 || !known_le (a.max_size, 0));

    /* No useful information tracked; collapse everything.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }

    modref_base_node <T> *base_node = insert_base (base, ref, &changed);
    base = base_node->base;
    /* If table got full we may end up with useless base.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }
    if (base_node->every_ref)
      return changed;
    gcc_checking_assert (search (base) != NULL);

    /* No useful ref info tracked; collapse base.  */
    if (!ref && !a.useful_p ())
      {
	base_node->collapse ();
	return true;
      }

    modref_ref_node <T> *ref_node = base_node->insert_ref (ref, max_refs,
							   &changed);
    ref = ref_node->ref;

    if (ref_node->every_access)
      return changed;
    changed |= ref_node->insert_access (a, max_accesses,
					record_adjustments);
    /* See if we failed to add useful access.  */
    if (ref_node->every_access)
      {
	/* Collapse everything if there is no useful base and ref.  */
	if (!base && !ref)
	  {
	    collapse ();
	    gcc_checking_assert (changed);
	  }
	/* Collapse base if there is no useful ref.  */
	else if (!ref)
	  {
	    base_node->collapse ();
	    gcc_checking_assert (changed);
	  }
      }
    return changed;
  }

  /* Remove tree branches that are not useful (i.e. they will always pass).  */

  void cleanup ()
  {
    size_t i, j;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;

    if (!bases)
      return;

    /* Indices advance only when no removal happened, because
       unordered_remove moves the last element into the freed slot.  */
    for (i = 0; vec_safe_iterate (bases, i, &base_node);)
      {
	if (base_node->refs)
	  for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);)
	    {
	      if (!ref_node->every_access
		  && (!ref_node->accesses
		      || !ref_node->accesses->length ()))
		{
		  base_node->refs->unordered_remove (j);
		  vec_free (ref_node->accesses);
		  ggc_delete (ref_node);
		}
	      else
		j++;
	    }
	if (!base_node->every_ref
	    && (!base_node->refs || !base_node->refs->length ()))
	  {
	    bases->unordered_remove (i);
	    vec_free (base_node->refs);
	    ggc_delete (base_node);
	  }
	else
	  i++;
      }
    if (bases && !bases->length ())
      {
	vec_free (bases);
	bases = NULL;
      }
  }

  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
     Similarly STATIC_CHAIN_MAP, if non-NULL, maps static chain of callee
     to caller.
     Return true if something has changed.  */
  bool merge (modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses)
  {
    if (!other || every_base)
      return false;
    if (other->every_base)
      {
	collapse ();
	return true;
      }

    bool changed = false;
    size_t i, j, k;
    modref_base_node <T> *base_node, *my_base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    bool release = false;

    /* For self-recursive functions we may end up merging summary into itself;
       produce copy first so we do not modify summary under our own hands.  */
    if (other == this)
      {
	release = true;
	other = modref_tree<T>::create_ggc (max_bases, max_refs, max_accesses);
	other->copy_from (this);
      }

    FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node)
      {
	if (base_node->every_ref)
	  {
	    my_base_node = insert_base (base_node->base, 0, &changed);
	    if (my_base_node && !my_base_node->every_ref)
	      {
		my_base_node->collapse ();
		cleanup ();
		changed = true;
	      }
	  }
	else
	  FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	    {
	      if (ref_node->every_access)
		{
		  changed |= insert (base_node->base,
				     ref_node->ref,
				     unspecified_modref_access_node,
				     record_accesses);
		}
	      else
		FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
		  {
		    modref_access_node a = *access_node;

		    /* Translate the access into the caller's parameter
		       space before inserting.  */
		    if (a.parm_index != MODREF_UNKNOWN_PARM && parm_map)
		      {
			if (a.parm_index >= (int)parm_map->length ())
			  a.parm_index = MODREF_UNKNOWN_PARM;
			else
			  {
			    modref_parm_map &m
			      = a.parm_index == MODREF_STATIC_CHAIN_PARM
				? *static_chain_map
				: (*parm_map) [a.parm_index];
			    /* Accesses to memory that became local in the
			       caller can be dropped entirely.  */
			    if (m.parm_index == MODREF_LOCAL_MEMORY_PARM)
			      continue;
			    a.parm_offset += m.parm_offset;
			    a.parm_offset_known &= m.parm_offset_known;
			    a.parm_index = m.parm_index;
			  }
		      }
		    changed |= insert (base_node->base, ref_node->ref, a,
				       record_accesses);
		  }
	    }
      }
    if (release)
      ggc_delete (other);
    return changed;
  }

  /* Copy OTHER to THIS.  Implemented as a merge with no parameter
     translation and no adjustment recording.  */
  void copy_from (modref_tree <T> *other)
  {
    merge (other, NULL, NULL, false);
  }

  /* Search BASE in tree; return NULL if failed.  */
  modref_base_node <T> *search (T base)
  {
    size_t i;
    modref_base_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (bases, i, n)
      if (n->base == base)
	return n;
    return NULL;
  }

  /* Return true if tree contains access to global memory, i.e. any
     node on a path to an access is collapsed or an access has an
     unknown parameter base.  */
  bool global_access_p ()
  {
    size_t i, j, k;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    if (every_base)
      return true;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	if (base_node->every_ref)
	  return true;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    if (ref_node->every_access)
	      return true;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index == MODREF_UNKNOWN_PARM)
		return true;
	  }
      }
    return false;
  }

  /* Return ggc allocated instance.  We explicitly call destructors via
     ggc_delete and do not want finalizers to be registered and
     called at the garbage collection time.  */
  static modref_tree<T> *create_ggc (size_t max_bases, size_t max_refs,
				     size_t max_accesses)
  {
    return new (ggc_alloc_no_dtor<modref_tree<T>> ())
	   modref_tree<T> (max_bases, max_refs, max_accesses);
  }

  /* Remove all records and mark tree to alias with everything.  */
  void collapse ()
  {
    size_t i;
    modref_base_node <T> *n;

    if (bases)
      {
	FOR_EACH_VEC_SAFE_ELT (bases, i, n)
	  {
	    n->collapse ();
	    ggc_free (n);
	  }
	vec_free (bases);
      }
    bases = NULL;
    every_base = true;
  }

  /* Release memory.  */
  ~modref_tree ()
  {
    collapse ();
  }

  /* Update parameter indexes in the tree according to MAP.
     Parameters missing from MAP become unknown.  */
  void
  remap_params (vec <int> *map)
  {
    size_t i;
    modref_base_node <T> *base_node;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	size_t j;
	modref_ref_node <T> *ref_node;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    size_t k;
	    modref_access_node *access_node;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index >= 0)
		{
		  if (access_node->parm_index < (int)map->length ())
		    access_node->parm_index = (*map)[access_node->parm_index];
		  else
		    access_node->parm_index = MODREF_UNKNOWN_PARM;
		}
	  }
      }
  }
};
689
690 void modref_c_tests ();
691
692 void gt_ggc_mx (modref_tree <int>* const&);
693 void gt_ggc_mx (modref_tree <tree_node*>* const&);
694 void gt_pch_nx (modref_tree <int>* const&);
695 void gt_pch_nx (modref_tree <tree_node*>* const&);
696 void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie);
697 void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op,
698 void *cookie);
699
700 void gt_ggc_mx (modref_base_node <int>*);
701 void gt_ggc_mx (modref_base_node <tree_node*>* &);
702 void gt_pch_nx (modref_base_node <int>* const&);
703 void gt_pch_nx (modref_base_node <tree_node*>* const&);
704 void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op,
705 void *cookie);
706 void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op,
707 void *cookie);
708
709 void gt_ggc_mx (modref_ref_node <int>*);
710 void gt_ggc_mx (modref_ref_node <tree_node*>* &);
711 void gt_pch_nx (modref_ref_node <int>* const&);
712 void gt_pch_nx (modref_ref_node <tree_node*>* const&);
713 void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op,
714 void *cookie);
715 void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op,
716 void *cookie);
717
718 #endif