]>
Commit | Line | Data |
---|---|---|
d119f34c | 1 | /* Data structure for the modref pass. |
99dee823 | 2 | Copyright (C) 2020-2021 Free Software Foundation, Inc. |
d119f34c JH |
3 | Contributed by David Cepelik and Jan Hubicka |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
c33f4742 JH |
21 | /* modref_tree represents a decision tree that can be used by alias analysis |
22 | oracle to determine whether given memory access can be affected by a function |
23 | call. For every function we collect two trees, one for loads and the other |
24 | for stores. Tree consists of the following levels: |
25 | ||
8a2fd716 | 26 | 1) Base: this level represents base alias set of the access and refers |
c33f4742 JH |
27 | to sons (ref nodes). Flag all_refs means that all possible references |
28 | are aliasing. | |
29 | ||
8a2fd716 | 30 | Because for LTO streaming we need to stream types rather than alias sets |
c33f4742 | 31 | modref_base_node is implemented as a template. |
8a2fd716 JJ |
32 | 2) Ref: this level represents ref alias set and links to accesses unless |
33 | all_refs flag is set. | |
c33f4742 JH |
34 | Again ref is a template to allow LTO streaming.
35 | 3) Access: this level represents info about individual accesses. Presently | |
8a2fd716 | 36 | we record whether access is through a dereference of a function parameter |
5c85f295 | 37 | and if so we record the access range. |
c33f4742 JH |
38 | */ |
39 | ||
d119f34c JH |
40 | #ifndef GCC_MODREF_TREE_H |
41 | #define GCC_MODREF_TREE_H | |
42 | ||
43 | struct ipa_modref_summary; | |
44 | ||
1f3a3363 JH |
45 | /* parm indexes greater than 0 are normal parms. |
46 | Some negative values have special meaning. */ | |
47 | enum modref_special_parms { | |
48 | MODREF_UNKNOWN_PARM = -1, | |
49 | MODREF_STATIC_CHAIN_PARM = -2, | |
50 | MODREF_RETSLOT_PARM = -3, | |
51 | /* Used in modref_parm_map to take references which can be removed | |
52 | from the summary during summary update since they now point to local | |
53 | memory. */ | |
54 | MODREF_LOCAL_MEMORY_PARM = -4 | |
55 | }; | |
56 | ||
e30bf330 JH |
57 | /* Modref records accesses relative to function parameters. |
58 | This is entry for single access specifying its base and access range. | |
59 | ||
60 | Accesses can be collected to boundedly sized arrays using | |
61 | modref_access_node::insert. */ | |
c33f4742 JH |
62 | struct GTY(()) modref_access_node |
63 | { | |
c34db4b6 JH |
64 | /* Access range information (in bits). */ |
65 | poly_int64 offset; | |
66 | poly_int64 size; | |
67 | poly_int64 max_size; | |
68 | ||
8a2fd716 | 69 | /* Offset from parameter pointer to the base of the access (in bytes). */ |
c34db4b6 JH |
70 | poly_int64 parm_offset; |
71 | ||
c33f4742 JH |
72 | /* Index of parameter which specifies the base of access. -1 if base is not |
73 | a function parameter. */ | |
74 | int parm_index; | |
c34db4b6 | 75 | bool parm_offset_known; |
5c85f295 JH |
76 | /* Number of times interval was extended during dataflow. |
77 | This has to be limited in order to keep dataflow finite. */ | |
78 | unsigned char adjustments; | |
c33f4742 | 79 | |
a2917490 | 80 | /* Return true if access node holds some useful info. */ |
c34db4b6 | 81 | bool useful_p () const |
c33f4742 | 82 | { |
1f3a3363 | 83 | return parm_index != MODREF_UNKNOWN_PARM; |
c33f4742 | 84 | } |
64f3e71c JH |
85 | /* Return true if access can be used to determine a kill. */ |
86 | bool useful_for_kill_p () const | |
87 | { | |
88 | return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM | |
89 | && parm_index != MODREF_RETSLOT_PARM && known_size_p (size) | |
90 | && known_eq (max_size, size); | |
91 | } | |
e30bf330 JH |
92 | /* Dump range to debug OUT. */ |
93 | void dump (FILE *out); | |
c34db4b6 | 94 | /* Return true if both accesses are the same. */ |
a246d723 | 95 | bool operator == (modref_access_node &a) const; |
e30bf330 JH |
96 | /* Return true if range info is useful. */ |
97 | bool range_info_useful_p () const; | |
a2917490 JH |
98 | /* Return tree corresponding to parameter of the range in STMT. */ |
99 | tree get_call_arg (const gcall *stmt) const; | |
100 | /* Build ao_ref corresponding to the access and return true if successful. */ | |
101 | bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const; | |
e30bf330 JH |
102 | /* Insert A into vector ACCESSES. Limit size of vector to MAX_ACCESSES and |
103 | if RECORD_ADJUSTMENT is true keep track of adjustment counts. | |
a2917490 | 104 | Return 0 if nothing changed, 1 if insertion succeeded and -1 if failed. */
a246d723 JH |
105 | static int insert (vec <modref_access_node, va_gc> *&accesses, |
106 | modref_access_node a, size_t max_accesses, | |
107 | bool record_adjustments); | |
64f3e71c JH |
108 | /* Same as insert but for kills where we are conservative the other way |
109 | around: if information is lost, the kill is lost. */ | |
110 | static bool insert_kill (vec<modref_access_node> &kills, | |
111 | modref_access_node &a, bool record_adjustments); | |
f5ff3a8e | 112 | private: |
a246d723 | 113 | bool contains (const modref_access_node &) const; |
64f3e71c | 114 | bool contains_for_kills (const modref_access_node &) const; |
a246d723 | 115 | void update (poly_int64, poly_int64, poly_int64, poly_int64, bool); |
64f3e71c JH |
116 | bool update_for_kills (poly_int64, poly_int64, poly_int64, |
117 | poly_int64, poly_int64, bool); | |
a246d723 | 118 | bool merge (const modref_access_node &, bool); |
64f3e71c | 119 | bool merge_for_kills (const modref_access_node &, bool); |
a246d723 JH |
120 | static bool closer_pair_p (const modref_access_node &, |
121 | const modref_access_node &, | |
122 | const modref_access_node &, | |
123 | const modref_access_node &); | |
124 | void forced_merge (const modref_access_node &, bool); | |
125 | void update2 (poly_int64, poly_int64, poly_int64, poly_int64, | |
126 | poly_int64, poly_int64, poly_int64, bool); | |
127 | bool combined_offsets (const modref_access_node &, | |
128 | poly_int64 *, poly_int64 *, poly_int64 *) const; | |
129 | static void try_merge_with (vec <modref_access_node, va_gc> *&, size_t); | |
c33f4742 | 130 | }; |
d119f34c | 131 | |
c34db4b6 JH |
132 | /* Access node specifying no useful info. */ |
133 | const modref_access_node unspecified_modref_access_node | |
1f3a3363 | 134 | = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0}; |
c34db4b6 | 135 | |
d119f34c JH |
136 | template <typename T> |
137 | struct GTY((user)) modref_ref_node | |
138 | { | |
139 | T ref; | |
c33f4742 JH |
140 | bool every_access; |
141 | vec <modref_access_node, va_gc> *accesses; | |
d119f34c JH |
142 | |
143 | modref_ref_node (T ref): | |
c33f4742 JH |
144 | ref (ref), |
145 | every_access (false), | |
146 | accesses (NULL) | |
d119f34c | 147 | {} |
c33f4742 | 148 | |
c33f4742 JH |
149 | /* Collapse the tree. */ |
150 | void collapse () | |
151 | { | |
152 | vec_free (accesses); | |
153 | accesses = NULL; | |
154 | every_access = true; | |
155 | } | |
156 | ||
157 | /* Insert access with OFFSET and SIZE. | |
5a90a186 | 158 | Collapse tree if it has more than MAX_ACCESSES entries. |
5c85f295 | 159 | If RECORD_ADJUSTMENTs is true avoid too many interval extensions. |
5a90a186 | 160 | Return true if record was changed. */ |
5c85f295 JH |
161 | bool insert_access (modref_access_node a, size_t max_accesses, |
162 | bool record_adjustments) | |
c33f4742 JH |
163 | { |
164 | /* If this base->ref pair has no access information, bail out. */ | |
165 | if (every_access) | |
5a90a186 | 166 | return false; |
c33f4742 | 167 | |
1f3a3363 JH |
168 | /* Only the following kinds of parameters need to be tracked.
169 | We do not track return slots because they are seen as a direct store | |
170 | in the caller. */ | |
171 | gcc_checking_assert (a.parm_index >= 0 | |
172 | || a.parm_index == MODREF_STATIC_CHAIN_PARM | |
173 | || a.parm_index == MODREF_UNKNOWN_PARM); | |
f075b8c5 | 174 | |
6a649642 JH |
175 | if (!a.useful_p ()) |
176 | { | |
177 | if (!every_access) | |
178 | { | |
179 | collapse (); | |
180 | return true; | |
181 | } | |
182 | return false; | |
183 | } | |
184 | ||
a246d723 JH |
185 | int ret = modref_access_node::insert (accesses, a, max_accesses, |
186 | record_adjustments); | |
187 | if (ret == -1) | |
c33f4742 | 188 | { |
a246d723 | 189 | if (dump_file) |
c33f4742 | 190 | fprintf (dump_file, |
f5ff3a8e | 191 | "--param param=modref-max-accesses limit reached;" |
a246d723 JH |
192 | " collapsing\n"); |
193 | collapse (); | |
c33f4742 | 194 | } |
a246d723 | 195 | return ret != 0; |
5c85f295 | 196 | } |
d119f34c JH |
197 | }; |
198 | ||
199 | /* Base of an access. */ | |
200 | template <typename T> | |
201 | struct GTY((user)) modref_base_node | |
202 | { | |
203 | T base; | |
204 | vec <modref_ref_node <T> *, va_gc> *refs; | |
205 | bool every_ref; | |
206 | ||
207 | modref_base_node (T base): | |
208 | base (base), | |
209 | refs (NULL), | |
210 | every_ref (false) {} | |
211 | ||
212 | /* Search REF; return NULL if failed. */ | |
213 | modref_ref_node <T> *search (T ref) | |
214 | { | |
215 | size_t i; | |
216 | modref_ref_node <T> *n; | |
217 | FOR_EACH_VEC_SAFE_ELT (refs, i, n) | |
218 | if (n->ref == ref) | |
219 | return n; | |
220 | return NULL; | |
221 | } | |
222 | ||
5a90a186 JH |
223 | /* Insert REF; collapse tree if there are more than MAX_REFS. |
224 | Return inserted ref and if CHANGED is non-null set it to true if | |
225 | something changed. */ | |
226 | modref_ref_node <T> *insert_ref (T ref, size_t max_refs, | |
227 | bool *changed = NULL) | |
d119f34c JH |
228 | { |
229 | modref_ref_node <T> *ref_node; | |
230 | ||
231 | /* If the node is collapsed, don't do anything. */ | |
232 | if (every_ref) | |
233 | return NULL; | |
234 | ||
d119f34c JH |
235 | /* Otherwise, insert a node for the ref of the access under the base. */ |
236 | ref_node = search (ref); | |
237 | if (ref_node) | |
238 | return ref_node; | |
239 | ||
e28ac73a JH |
240 | /* We always allow inserting ref 0. For non-0 refs there is upper |
241 | limit on number of entries and if exceeded, | |
242 | drop ref conservatively to 0. */ | |
243 | if (ref && refs && refs->length () >= max_refs) | |
d119f34c JH |
244 | { |
245 | if (dump_file) | |
e28ac73a JH |
246 | fprintf (dump_file, "--param param=modref-max-refs limit reached;" |
247 | " using 0\n"); | |
248 | ref = 0; | |
249 | ref_node = search (ref); | |
250 | if (ref_node) | |
251 | return ref_node; | |
d119f34c JH |
252 | } |
253 | ||
e28ac73a JH |
254 | if (changed) |
255 | *changed = true; | |
256 | ||
d119f34c JH |
257 | ref_node = new (ggc_alloc <modref_ref_node <T> > ())modref_ref_node <T> |
258 | (ref); | |
259 | vec_safe_push (refs, ref_node); | |
260 | return ref_node; | |
261 | } | |
262 | ||
263 | void collapse () | |
264 | { | |
c9da53d6 JH |
265 | size_t i; |
266 | modref_ref_node <T> *r; | |
267 | ||
268 | if (refs) | |
269 | { | |
270 | FOR_EACH_VEC_SAFE_ELT (refs, i, r) | |
c33f4742 JH |
271 | { |
272 | r->collapse (); | |
273 | ggc_free (r); | |
274 | } | |
c9da53d6 JH |
275 | vec_free (refs); |
276 | } | |
d119f34c JH |
277 | refs = NULL; |
278 | every_ref = true; | |
279 | } | |
280 | }; | |
281 | ||
c34db4b6 JH |
282 | /* Map translating parameters across function call. */ |
283 | ||
284 | struct modref_parm_map | |
285 | { | |
286 | /* Index of parameter we translate to. | |
1f3a3363 | 287 | Values from special_params enum are permitted too. */ |
c34db4b6 JH |
288 | int parm_index; |
289 | bool parm_offset_known; | |
290 | poly_int64 parm_offset; | |
291 | }; | |
292 | ||
d119f34c JH |
293 | /* Access tree for a single function. */ |
294 | template <typename T> | |
295 | struct GTY((user)) modref_tree | |
296 | { | |
297 | vec <modref_base_node <T> *, va_gc> *bases; | |
298 | size_t max_bases; | |
299 | size_t max_refs; | |
c33f4742 | 300 | size_t max_accesses; |
d119f34c JH |
301 | bool every_base; |
302 | ||
c33f4742 | 303 | modref_tree (size_t max_bases, size_t max_refs, size_t max_accesses): |
d119f34c JH |
304 | bases (NULL), |
305 | max_bases (max_bases), | |
306 | max_refs (max_refs), | |
c33f4742 | 307 | max_accesses (max_accesses), |
d119f34c JH |
308 | every_base (false) {} |
309 | ||
5a90a186 JH |
310 | /* Insert BASE; collapse tree if there are more than MAX_REFS. |
311 | Return inserted base and if CHANGED is non-null set it to true if | |
e28ac73a JH |
312 | something changed. |
313 | If table gets full, try to insert REF instead. */ | |
5a90a186 | 314 | |
e28ac73a | 315 | modref_base_node <T> *insert_base (T base, T ref, bool *changed = NULL) |
d119f34c JH |
316 | { |
317 | modref_base_node <T> *base_node; | |
318 | ||
319 | /* If the node is collapsed, don't do anything. */ | |
320 | if (every_base) | |
321 | return NULL; | |
322 | ||
323 | /* Otherwise, insert a node for the base of the access into the tree. */ | |
324 | base_node = search (base); | |
325 | if (base_node) | |
326 | return base_node; | |
327 | ||
e28ac73a JH |
328 | /* We always allow inserting base 0. For non-0 base there is upper |
329 | limit on number of entries and if exceeded, | |
330 | drop base conservatively to ref and if it still does not fit to 0. */ | |
331 | if (base && bases && bases->length () >= max_bases) | |
d119f34c | 332 | { |
e28ac73a JH |
333 | base_node = search (ref); |
334 | if (base_node) | |
335 | { | |
336 | if (dump_file) | |
337 | fprintf (dump_file, "--param param=modref-max-bases" | |
338 | " limit reached; using ref\n"); | |
339 | return base_node; | |
340 | } | |
d119f34c | 341 | if (dump_file) |
e28ac73a JH |
342 | fprintf (dump_file, "--param param=modref-max-bases" |
343 | " limit reached; using 0\n"); | |
344 | base = 0; | |
345 | base_node = search (base); | |
346 | if (base_node) | |
347 | return base_node; | |
d119f34c JH |
348 | } |
349 | ||
e28ac73a JH |
350 | if (changed) |
351 | *changed = true; | |
352 | ||
d119f34c JH |
353 | base_node = new (ggc_alloc <modref_base_node <T> > ()) |
354 | modref_base_node <T> (base); | |
355 | vec_safe_push (bases, base_node); | |
356 | return base_node; | |
357 | } | |
358 | ||
5a90a186 JH |
359 | /* Insert memory access to the tree. |
360 | Return true if something changed. */ | |
5c85f295 JH |
361 | bool insert (T base, T ref, modref_access_node a, |
362 | bool record_adjustments) | |
d119f34c | 363 | { |
5a90a186 JH |
364 | if (every_base) |
365 | return false; | |
366 | ||
367 | bool changed = false; | |
368 | ||
5f377803 JH |
369 | /* We may end up with max_size being less than size for accesses past the |
370 | end of array. Those are undefined and safe to ignore. */ | |
371 | if (a.range_info_useful_p () | |
372 | && known_size_p (a.size) && known_size_p (a.max_size) | |
373 | && known_lt (a.max_size, a.size)) | |
374 | { | |
375 | if (dump_file) | |
376 | fprintf (dump_file, | |
377 | " - Paradoxical range. Ignoring\n"); | |
378 | return false; | |
379 | } | |
380 | if (known_size_p (a.size) | |
381 | && known_eq (a.size, 0)) | |
382 | { | |
383 | if (dump_file) | |
384 | fprintf (dump_file, | |
385 | " - Zero size. Ignoring\n"); | |
386 | return false; | |
387 | } | |
388 | if (known_size_p (a.max_size) | |
389 | && known_eq (a.max_size, 0)) | |
390 | { | |
391 | if (dump_file) | |
392 | fprintf (dump_file, | |
393 | " - Zero max_size. Ignoring\n"); | |
394 | return false; | |
395 | } | |
396 | gcc_checking_assert (!known_size_p (a.max_size) | |
397 | || !known_le (a.max_size, 0)); | |
398 | ||
c33f4742 JH |
399 | /* No useful information tracked; collapse everything. */ |
400 | if (!base && !ref && !a.useful_p ()) | |
d119f34c JH |
401 | { |
402 | collapse (); | |
5a90a186 | 403 | return true; |
d119f34c | 404 | } |
c33f4742 | 405 | |
e28ac73a JH |
406 | modref_base_node <T> *base_node = insert_base (base, ref, &changed); |
407 | base = base_node->base; | |
408 | /* If table got full we may end up with useless base. */ | |
409 | if (!base && !ref && !a.useful_p ()) | |
410 | { | |
411 | collapse (); | |
412 | return true; | |
413 | } | |
414 | if (base_node->every_ref) | |
5a90a186 JH |
415 | return changed; |
416 | gcc_checking_assert (search (base) != NULL); | |
417 | ||
418 | /* No useful ref info tracked; collapse base. */ | |
419 | if (!ref && !a.useful_p ()) | |
420 | { | |
421 | base_node->collapse (); | |
422 | return true; | |
423 | } | |
d119f34c | 424 | |
5a90a186 JH |
425 | modref_ref_node <T> *ref_node = base_node->insert_ref (ref, max_refs, |
426 | &changed); | |
e28ac73a | 427 | ref = ref_node->ref; |
c33f4742 | 428 | |
e28ac73a JH |
429 | if (ref_node->every_access) |
430 | return changed; | |
431 | changed |= ref_node->insert_access (a, max_accesses, | |
432 | record_adjustments); | |
433 | /* See if we failed to add useful access. */ | |
434 | if (ref_node->every_access) | |
d119f34c | 435 | { |
e28ac73a JH |
436 | /* Collapse everything if there is no useful base and ref. */ |
437 | if (!base && !ref) | |
5a90a186 JH |
438 | { |
439 | collapse (); | |
440 | gcc_checking_assert (changed); | |
441 | } | |
e28ac73a JH |
442 | /* Collapse base if there is no useful ref. */ |
443 | else if (!ref) | |
c33f4742 | 444 | { |
e28ac73a JH |
445 | base_node->collapse (); |
446 | gcc_checking_assert (changed); | |
c33f4742 JH |
447 | } |
448 | } | |
5a90a186 | 449 | return changed; |
d119f34c JH |
450 | } |
451 | ||
8a2fd716 | 452 | /* Remove tree branches that are not useful (i.e. they will always pass). */ |
c33f4742 JH |
453 | |
454 | void cleanup () | |
455 | { | |
456 | size_t i, j; | |
457 | modref_base_node <T> *base_node; | |
458 | modref_ref_node <T> *ref_node; | |
459 | ||
460 | if (!bases) | |
461 | return; | |
462 | ||
463 | for (i = 0; vec_safe_iterate (bases, i, &base_node);) | |
464 | { | |
465 | if (base_node->refs) | |
466 | for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);) | |
467 | { | |
468 | if (!ref_node->every_access | |
469 | && (!ref_node->accesses | |
470 | || !ref_node->accesses->length ())) | |
471 | { | |
472 | base_node->refs->unordered_remove (j); | |
473 | vec_free (ref_node->accesses); | |
474 | ggc_delete (ref_node); | |
475 | } | |
476 | else | |
477 | j++; | |
478 | } | |
479 | if (!base_node->every_ref | |
480 | && (!base_node->refs || !base_node->refs->length ())) | |
481 | { | |
482 | bases->unordered_remove (i); | |
483 | vec_free (base_node->refs); | |
484 | ggc_delete (base_node); | |
485 | } | |
486 | else | |
487 | i++; | |
488 | } | |
489 | if (bases && !bases->length ()) | |
490 | { | |
491 | vec_free (bases); | |
492 | bases = NULL; | |
493 | } | |
494 | } | |
495 | ||
496 | /* Merge OTHER into the tree. | |
1f3a3363 JH |
497 | PARM_MAP, if non-NULL, maps parm indexes of callee to caller. |
498 | Similar CHAIN_MAP, if non-NULL, maps static chain of callee to caller. | |
5a90a186 | 499 | Return true if something has changed. */ |
5c85f295 | 500 | bool merge (modref_tree <T> *other, vec <modref_parm_map> *parm_map, |
1f3a3363 | 501 | modref_parm_map *static_chain_map, |
5c85f295 | 502 | bool record_accesses) |
d119f34c | 503 | { |
5a90a186 JH |
504 | if (!other || every_base) |
505 | return false; | |
d119f34c JH |
506 | if (other->every_base) |
507 | { | |
508 | collapse (); | |
5a90a186 | 509 | return true; |
d119f34c JH |
510 | } |
511 | ||
5a90a186 | 512 | bool changed = false; |
c33f4742 | 513 | size_t i, j, k; |
d119f34c | 514 | modref_base_node <T> *base_node, *my_base_node; |
5a90a186 | 515 | modref_ref_node <T> *ref_node; |
c33f4742 | 516 | modref_access_node *access_node; |
5a90a186 JH |
517 | bool release = false; |
518 | ||
519 | /* For self-recursive functions we may end up merging summary into itself; | |
520 | produce copy first so we do not modify summary under our own hands. */ | |
521 | if (other == this) | |
d119f34c | 522 | { |
5a90a186 JH |
523 | release = true; |
524 | other = modref_tree<T>::create_ggc (max_bases, max_refs, max_accesses); | |
525 | other->copy_from (this); | |
526 | } | |
d119f34c | 527 | |
5a90a186 JH |
528 | FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node) |
529 | { | |
d119f34c JH |
530 | if (base_node->every_ref) |
531 | { | |
e28ac73a | 532 | my_base_node = insert_base (base_node->base, 0, &changed); |
5a90a186 | 533 | if (my_base_node && !my_base_node->every_ref) |
c33f4742 | 534 | { |
5a90a186 JH |
535 | my_base_node->collapse (); |
536 | cleanup (); | |
537 | changed = true; | |
c33f4742 | 538 | } |
5a90a186 JH |
539 | } |
540 | else | |
541 | FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node) | |
542 | { | |
543 | if (ref_node->every_access) | |
544 | { | |
c34db4b6 JH |
545 | changed |= insert (base_node->base, |
546 | ref_node->ref, | |
5c85f295 JH |
547 | unspecified_modref_access_node, |
548 | record_accesses); | |
5a90a186 JH |
549 | } |
550 | else | |
551 | FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node) | |
c33f4742 | 552 | { |
5a90a186 | 553 | modref_access_node a = *access_node; |
c34db4b6 | 554 | |
1f3a3363 | 555 | if (a.parm_index != MODREF_UNKNOWN_PARM && parm_map) |
5a90a186 JH |
556 | { |
557 | if (a.parm_index >= (int)parm_map->length ()) | |
1f3a3363 | 558 | a.parm_index = MODREF_UNKNOWN_PARM; |
5a90a186 | 559 | else |
c34db4b6 | 560 | { |
1f3a3363 JH |
561 | modref_parm_map &m |
562 | = a.parm_index == MODREF_STATIC_CHAIN_PARM | |
563 | ? *static_chain_map | |
564 | : (*parm_map) [a.parm_index]; | |
565 | if (m.parm_index == MODREF_LOCAL_MEMORY_PARM) | |
566 | continue; | |
567 | a.parm_offset += m.parm_offset; | |
568 | a.parm_offset_known &= m.parm_offset_known; | |
569 | a.parm_index = m.parm_index; | |
c34db4b6 | 570 | } |
5a90a186 | 571 | } |
5c85f295 JH |
572 | changed |= insert (base_node->base, ref_node->ref, a, |
573 | record_accesses); | |
c33f4742 | 574 | } |
5a90a186 | 575 | } |
d119f34c | 576 | } |
5a90a186 JH |
577 | if (release) |
578 | ggc_delete (other); | |
579 | return changed; | |
c33f4742 JH |
580 | } |
581 | ||
582 | /* Copy OTHER to THIS. */ | |
583 | void copy_from (modref_tree <T> *other) | |
584 | { | |
1f3a3363 | 585 | merge (other, NULL, NULL, false); |
d119f34c JH |
586 | } |
587 | ||
588 | /* Search BASE in tree; return NULL if failed. */ | |
589 | modref_base_node <T> *search (T base) | |
590 | { | |
591 | size_t i; | |
592 | modref_base_node <T> *n; | |
593 | FOR_EACH_VEC_SAFE_ELT (bases, i, n) | |
594 | if (n->base == base) | |
595 | return n; | |
596 | return NULL; | |
597 | } | |
598 | ||
008e7397 JH |
599 | /* Return true if tree contains access to global memory. */ |
600 | bool global_access_p () | |
601 | { | |
602 | size_t i, j, k; | |
603 | modref_base_node <T> *base_node; | |
604 | modref_ref_node <T> *ref_node; | |
605 | modref_access_node *access_node; | |
606 | if (every_base) | |
607 | return true; | |
608 | FOR_EACH_VEC_SAFE_ELT (bases, i, base_node) | |
609 | { | |
610 | if (base_node->every_ref) | |
611 | return true; | |
612 | FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node) | |
613 | { | |
614 | if (ref_node->every_access) | |
615 | return true; | |
616 | FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node) | |
1f3a3363 | 617 | if (access_node->parm_index == MODREF_UNKNOWN_PARM) |
008e7397 JH |
618 | return true; |
619 | } | |
620 | } | |
621 | return false; | |
622 | } | |
623 | ||
c9da53d6 JH |
624 | /* Return ggc allocated instance. We explicitly call destructors via |
625 | ggc_delete and do not want finalizers to be registered and | |
626 | called at the garbage collection time. */ | |
c33f4742 JH |
627 | static modref_tree<T> *create_ggc (size_t max_bases, size_t max_refs, |
628 | size_t max_accesses) | |
c9da53d6 JH |
629 | { |
630 | return new (ggc_alloc_no_dtor<modref_tree<T>> ()) | |
c33f4742 | 631 | modref_tree<T> (max_bases, max_refs, max_accesses); |
c9da53d6 JH |
632 | } |
633 | ||
c33f4742 | 634 | /* Remove all records and mark tree to alias with everything. */ |
d119f34c JH |
635 | void collapse () |
636 | { | |
c9da53d6 JH |
637 | size_t i; |
638 | modref_base_node <T> *n; | |
639 | ||
640 | if (bases) | |
641 | { | |
642 | FOR_EACH_VEC_SAFE_ELT (bases, i, n) | |
643 | { | |
644 | n->collapse (); | |
645 | ggc_free (n); | |
646 | } | |
647 | vec_free (bases); | |
648 | } | |
d119f34c JH |
649 | bases = NULL; |
650 | every_base = true; | |
651 | } | |
c33f4742 JH |
652 | |
653 | /* Release memory. */ | |
c9da53d6 JH |
654 | ~modref_tree () |
655 | { | |
656 | collapse (); | |
657 | } | |
fe90c504 JH |
658 | |
659 | /* Update parameter indexes in TT according to MAP. */ | |
660 | void | |
661 | remap_params (vec <int> *map) | |
662 | { | |
663 | size_t i; | |
664 | modref_base_node <T> *base_node; | |
665 | FOR_EACH_VEC_SAFE_ELT (bases, i, base_node) | |
666 | { | |
667 | size_t j; | |
668 | modref_ref_node <T> *ref_node; | |
669 | FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node) | |
670 | { | |
671 | size_t k; | |
672 | modref_access_node *access_node; | |
673 | FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node) | |
b406bb90 | 674 | if (access_node->parm_index >= 0) |
fe90c504 JH |
675 | { |
676 | if (access_node->parm_index < (int)map->length ()) | |
677 | access_node->parm_index = (*map)[access_node->parm_index]; | |
678 | else | |
1f3a3363 | 679 | access_node->parm_index = MODREF_UNKNOWN_PARM; |
fe90c504 JH |
680 | } |
681 | } | |
682 | } | |
683 | } | |
d119f34c JH |
684 | }; |
685 | ||
686 | void modref_c_tests (); | |
687 | ||
688 | void gt_ggc_mx (modref_tree <int>* const&); | |
689 | void gt_ggc_mx (modref_tree <tree_node*>* const&); | |
690 | void gt_pch_nx (modref_tree <int>* const&); | |
691 | void gt_pch_nx (modref_tree <tree_node*>* const&); | |
692 | void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie); | |
693 | void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op, | |
694 | void *cookie); | |
695 | ||
696 | void gt_ggc_mx (modref_base_node <int>*); | |
697 | void gt_ggc_mx (modref_base_node <tree_node*>* &); | |
698 | void gt_pch_nx (modref_base_node <int>* const&); | |
699 | void gt_pch_nx (modref_base_node <tree_node*>* const&); | |
700 | void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op, | |
701 | void *cookie); | |
702 | void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op, | |
703 | void *cookie); | |
704 | ||
705 | void gt_ggc_mx (modref_ref_node <int>*); | |
706 | void gt_ggc_mx (modref_ref_node <tree_node*>* &); | |
707 | void gt_pch_nx (modref_ref_node <int>* const&); | |
708 | void gt_pch_nx (modref_ref_node <tree_node*>* const&); | |
709 | void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op, | |
710 | void *cookie); | |
711 | void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op, | |
712 | void *cookie); | |
713 | ||
714 | #endif |