1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49 #include "vecprim.h"
50
51 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
52 aliasing */
53 static bitmap_obstack alias_obstack;
54
55 /* 'true' after aliases have been computed (see compute_may_aliases). */
56 bool aliases_computed_p;
57
58 /* Structure to map a variable to its alias set and keep track of the
59 virtual operands that will be needed to represent it. */
60 struct alias_map_d
61 {
62 /* Variable and its alias set. */
63 tree var;
64 HOST_WIDE_INT set;
65
66 /* Total number of virtual operands that will be needed to represent
67 all the aliases of VAR. */
68 long total_alias_vops;
69
70 /* Nonzero if the aliases for this memory tag have been grouped
71 already. Used in group_aliases. */
72 unsigned int grouped_p : 1;
73
74 /* Set of variables aliased with VAR. This is the exact same
75 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
76 bitmap form to speed up alias grouping. */
77 bitmap may_aliases;
78 };
79
80
81 /* Counters used to display statistics on alias analysis. */
82 struct alias_stats_d
83 {
84 unsigned int alias_queries;
85 unsigned int alias_mayalias;
86 unsigned int alias_noalias;
87 unsigned int simple_queries;
88 unsigned int simple_resolved;
89 unsigned int tbaa_queries;
90 unsigned int tbaa_resolved;
91 unsigned int structnoaddress_queries;
92 unsigned int structnoaddress_resolved;
93 };
94
95
96 /* Local variables. */
97 static struct alias_stats_d alias_stats;
98
99 /* Local functions. */
100 static void compute_flow_insensitive_aliasing (struct alias_info *);
101 static void dump_alias_stats (FILE *);
102 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
103 static tree create_memory_tag (tree type, bool is_type_tag);
104 static tree get_tmt_for (tree, struct alias_info *);
105 static tree get_nmt_for (tree);
106 static void add_may_alias (tree, tree);
107 static void replace_may_alias (tree, size_t, tree);
108 static struct alias_info *init_alias_info (void);
109 static void delete_alias_info (struct alias_info *);
110 static void compute_flow_sensitive_aliasing (struct alias_info *);
111 static void setup_pointers_and_addressables (struct alias_info *);
112 static void create_global_var (void);
113 static void maybe_create_global_var (struct alias_info *ai);
114 static void group_aliases (struct alias_info *);
115 static void set_pt_anything (tree ptr);
116
117 /* Global declarations. */
118
119 /* Call clobbered variables in the function. If bit I is set, then
120 REFERENCED_VARS (I) is call-clobbered. */
121 bitmap call_clobbered_vars;
122
123 /* Addressable variables in the function. If bit I is set, then
124 REFERENCED_VARS (I) has had its address taken. Note that
125 CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
126 addressable variable is not necessarily call-clobbered (e.g., a
127 local addressable whose address does not escape) and not all
128 call-clobbered variables are addressable (e.g., a local static
129 variable). */
130 bitmap addressable_vars;
131
132 /* When the program has too many call-clobbered variables and call-sites,
133 this variable is used to represent the clobbering effects of function
134 calls. In these cases, all the call clobbered variables in the program
135 are forced to alias this variable. This reduces compile times by not
136 having to keep track of too many V_MAY_DEF expressions at call sites. */
137 tree global_var;
138
139 /* qsort comparison function to sort type/name tags by DECL_UID. */
140
141 static int
142 sort_tags_by_id (const void *pa, const void *pb)
143 {
144 tree a = *(tree *)pa;
145 tree b = *(tree *)pb;
146
147 return DECL_UID (a) - DECL_UID (b);
148 }
149
150 /* Initialize WORKLIST to contain those memory tags that are marked call
151    clobbered.  Initialize WORKLIST2 to contain the reasons these
152 memory tags escaped. */
153
154 static void
155 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
156 VEC (int, heap) **worklist2)
157 {
158 referenced_var_iterator rvi;
159 tree curr;
160
161 FOR_EACH_REFERENCED_VAR (curr, rvi)
162 {
163 if (MTAG_P (curr) && is_call_clobbered (curr))
164 {
165 VEC_safe_push (tree, heap, *worklist, curr);
166 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
167 }
168 }
169 }
170
171 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
172 ALIAS is not already marked call clobbered, and is a memory
173 tag. */
174
175 static void
176 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
177 VEC (int, heap) **worklist2,
178 int reason)
179 {
180 if (MTAG_P (alias) && !is_call_clobbered (alias))
181 {
182 VEC_safe_push (tree, heap, *worklist, alias);
183 VEC_safe_push (int, heap, *worklist2, reason);
184 }
185 }
186
187 /* Mark aliases of TAG as call clobbered, and place any tags on the
188 alias list that were not already call clobbered on WORKLIST. */
189
190 static void
191 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
192 VEC (int, heap) **worklist2)
193 {
194 unsigned int i;
195 VEC (tree, gc) *ma;
196 tree entry;
197 var_ann_t ta = var_ann (tag);
198
199 if (!MTAG_P (tag))
200 return;
201 ma = may_aliases (tag);
202 if (!ma)
203 return;
204
205 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
206 {
207 if (!unmodifiable_var_p (entry))
208 {
209 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
210 mark_call_clobbered (entry, ta->escape_mask);
211 }
212 }
213 }
214
215 /* Tags containing global vars need to be marked as global.
216 Tags containing call clobbered vars need to be marked as call
217 clobbered. */
218
219 static void
220 compute_tag_properties (void)
221 {
222 referenced_var_iterator rvi;
223 tree tag;
224 bool changed = true;
225 VEC (tree, heap) *taglist = NULL;
226
227 FOR_EACH_REFERENCED_VAR (tag, rvi)
228 {
229 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
230 continue;
231 VEC_safe_push (tree, heap, taglist, tag);
232 }
233
234 /* We sort the taglist by DECL_UID, for two reasons.
235 1. To get a sequential ordering to make the bitmap accesses
236 faster.
237 2. Because of the way we compute aliases, it's more likely that
238 an earlier tag is included in a later tag, and this will reduce
239 the number of iterations.
240
241 If we had a real tag graph, we would just topo-order it and be
242 done with it. */
243 qsort (VEC_address (tree, taglist),
244 VEC_length (tree, taglist),
245 sizeof (tree),
246 sort_tags_by_id);
247
248 /* Go through each tag not marked as global, and if it aliases
249 global vars, mark it global.
250
251 If the tag contains call clobbered vars, mark it call
252 clobbered.
253
254 This loop iterates because tags may appear in the may-aliases
255 list of other tags when we group. */
256
257 while (changed)
258 {
259 unsigned int k;
260
261 changed = false;
262 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
263 {
264 VEC (tree, gc) *ma;
265 unsigned int i;
266 tree entry;
267 bool tagcc = is_call_clobbered (tag);
268 bool tagglobal = MTAG_GLOBAL (tag);
269
270 if (tagcc && tagglobal)
271 continue;
272
273 ma = may_aliases (tag);
274 if (!ma)
275 continue;
276
277 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
278 {
279 /* Call clobbered entries cause the tag to be marked
280 call clobbered. */
281 if (!tagcc && is_call_clobbered (entry))
282 {
283 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
284 tagcc = true;
285 changed = true;
286 }
287
288 /* Global vars cause the tag to be marked global. */
289 if (!tagglobal && is_global_var (entry))
290 {
291 MTAG_GLOBAL (tag) = true;
292 changed = true;
293 tagglobal = true;
294 }
295
296 /* Early exit once both global and cc are set, since the
297 loop can't do any more than that. */
298 if (tagcc && tagglobal)
299 break;
300 }
301 }
302 }
303 VEC_free (tree, heap, taglist);
304 }
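/* Illustrative example (editorial addition, hypothetical names):

     int g;

     void
     foo (int x)
     {
       int a;
       int *p = x ? &g : &a;
       *p = 1;
     }

   Here the symbol memory tag for 'p' has may-aliases { a, g }.  Because 'g'
   is a global variable, the loop above sets MTAG_GLOBAL on the tag; and if
   'a' is call clobbered (for instance because its address escapes), the tag
   is marked call clobbered with the escape mask recorded for 'a'.  The outer
   while loop repeats until nothing changes, since a tag may itself appear in
   the may-aliases list of another tag.  */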
305
306 /* Set up the initial variable clobbers and globalness.
307 When this function completes, only tags whose aliases need to be
308 clobbered will be set clobbered. Tags clobbered because they
309 contain call clobbered vars are handled in compute_tag_properties. */
310
311 static void
312 set_initial_properties (struct alias_info *ai)
313 {
314 unsigned int i;
315 referenced_var_iterator rvi;
316 tree var;
317 tree ptr;
318
319 FOR_EACH_REFERENCED_VAR (var, rvi)
320 {
321 if (is_global_var (var)
322 && (!var_can_have_subvars (var)
323 || get_subvars_for_var (var) == NULL))
324 {
325 if (!unmodifiable_var_p (var))
326 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
327 }
328 else if (TREE_CODE (var) == PARM_DECL
329 && default_def (var)
330 && POINTER_TYPE_P (TREE_TYPE (var)))
331 {
332 tree def = default_def (var);
333 get_ptr_info (def)->value_escapes_p = 1;
334 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
335 }
336 }
337
338 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
339 {
340 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
341 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
342
343 if (pi->value_escapes_p)
344 {
345 /* If PTR escapes then its associated memory tags and
346 pointed-to variables are call-clobbered. */
347 if (pi->name_mem_tag)
348 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
349
350 if (v_ann->symbol_mem_tag)
351 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
352
353 if (pi->pt_vars)
354 {
355 bitmap_iterator bi;
356 unsigned int j;
357 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
358 if (!unmodifiable_var_p (referenced_var (j)))
359 mark_call_clobbered (referenced_var (j), pi->escape_mask);
360 }
361 }
362
363 /* If the name tag is call clobbered, so is the symbol tag
364 associated with the base VAR_DECL. */
365 if (pi->name_mem_tag
366 && v_ann->symbol_mem_tag
367 && is_call_clobbered (pi->name_mem_tag))
368 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
369
370       /* Name tags and symbol tags whose points-to sets are unknown
371 	 might point to global memory, and thus are clobbered.
372
373 FIXME: This is not quite right. They should only be
374 clobbered if value_escapes_p is true, regardless of whether
375 they point to global memory or not.
376 So removing this code and fixing all the bugs would be nice.
377 It is the cause of a bunch of clobbering. */
378 if ((pi->pt_global_mem || pi->pt_anything)
379 && pi->is_dereferenced && pi->name_mem_tag)
380 {
381 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
382 MTAG_GLOBAL (pi->name_mem_tag) = true;
383 }
384
385 if ((pi->pt_global_mem || pi->pt_anything)
386 && pi->is_dereferenced
387 && v_ann->symbol_mem_tag)
388 {
389 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
390 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
391 }
392 }
393 }
394
395
396 /* This variable is set to true if we are updating the used alone
397 information for SMTs, or are in a pass that is going to break it
398 temporarily. */
399 bool updating_used_alone;
400
401 /* Compute which variables need to be marked call clobbered because
402 their tag is call clobbered, and which tags need to be marked
403 global because they contain global variables. */
404
405 static void
406 compute_call_clobbered (struct alias_info *ai)
407 {
408 VEC (tree, heap) *worklist = NULL;
409 VEC(int,heap) *worklist2 = NULL;
410
411 set_initial_properties (ai);
412 init_transitive_clobber_worklist (&worklist, &worklist2);
413 while (VEC_length (tree, worklist) != 0)
414 {
415 tree curr = VEC_pop (tree, worklist);
416 int reason = VEC_pop (int, worklist2);
417
418 mark_call_clobbered (curr, reason);
419 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
420 }
421 VEC_free (tree, heap, worklist);
422 VEC_free (int, heap, worklist2);
423 compute_tag_properties ();
424 }
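/* Illustrative example (editorial addition, hypothetical names):

     void bar (int *);

     int
     foo (void)
     {
       int a;
       int *p = &a;
       bar (p);
       a = 3;
       return a;
     }

   Passing 'p' to bar() makes it escape, so set_initial_properties marks p's
   name memory tag (and 'a' itself) call clobbered.  The worklist loop above
   then walks the may-aliases of every clobbered memory tag and marks them
   call clobbered with the same escape reason, pushing any aliased tags back
   onto the worklist so the property propagates transitively.  */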
425
426
427 /* Helper for recalculate_used_alone. Return a conservatively correct
428 answer as to whether STMT may make a store on the LHS to SYM. */
429
430 static bool
431 lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
432 {
433 tree lhs = TREE_OPERAND (stmt, 0);
434
435 lhs = get_base_address (lhs);
436
437 if (!lhs)
438 return false;
439
440 if (TREE_CODE (lhs) == SSA_NAME)
441 return false;
442 /* We could do better here by looking at the type tag of LHS, but it
443 is unclear whether this is worth it. */
444 return true;
445 }
446
447 /* Recalculate the used_alone information for SMTs. */
448
449 void
450 recalculate_used_alone (void)
451 {
452 VEC (tree, heap) *calls = NULL;
453 block_stmt_iterator bsi;
454 basic_block bb;
455 tree stmt;
456 size_t i;
457 referenced_var_iterator rvi;
458 tree var;
459
460 /* First, reset all the SMT used alone bits to zero. */
461 updating_used_alone = true;
462 FOR_EACH_REFERENCED_VAR (var, rvi)
463 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
464 {
465 SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
466 SMT_USED_ALONE (var) = 0;
467 }
468
469 /* Walk all the statements.
470 Calls get put into a list of statements to update, since we will
471 need to update operands on them if we make any changes.
472      If we see a bare use of an SMT anywhere in a real virtual use or virtual
473 def, mark the SMT as used alone, and for renaming. */
474 FOR_EACH_BB (bb)
475 {
476 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
477 {
478 bool iscall = false;
479 ssa_op_iter iter;
480
481 stmt = bsi_stmt (bsi);
482
483 if (TREE_CODE (stmt) == CALL_EXPR
484 || (TREE_CODE (stmt) == MODIFY_EXPR
485 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
486 {
487 iscall = true;
488 VEC_safe_push (tree, heap, calls, stmt);
489 }
490
491 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
492 SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
493 {
494 tree svar = var;
495
496 if (TREE_CODE (var) == SSA_NAME)
497 svar = SSA_NAME_VAR (var);
498
499 if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
500 {
501 /* We only care about the LHS on calls. */
502 if (iscall && !lhs_may_store_to (stmt, svar))
503 continue;
504
505 if (!SMT_USED_ALONE (svar))
506 {
507 SMT_USED_ALONE (svar) = true;
508
509 /* Only need to mark for renaming if it wasn't
510 used alone before. */
511 if (!SMT_OLD_USED_ALONE (svar))
512 mark_sym_for_renaming (svar);
513 }
514 }
515 }
516 }
517 }
518
519 /* Update the operands on all the calls we saw. */
520 if (calls)
521 {
522 for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
523 update_stmt (stmt);
524 }
525 VEC_free (tree, heap, calls);
526 updating_used_alone = false;
527 }
528
529 /* Compute may-alias information for every variable referenced in function
530 FNDECL.
531
532 Alias analysis proceeds in 3 main phases:
533
534 1- Points-to and escape analysis.
535
536 This phase walks the use-def chains in the SSA web looking for three
537 things:
538
539 * Assignments of the form P_i = &VAR
540 * Assignments of the form P_i = malloc()
541 * Pointers and ADDR_EXPR that escape the current function.
542
543 The concept of 'escaping' is the same one used in the Java world. When
544 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
545    outside of the current function. So, assigning a pointer to a global
546    variable, passing it as a function argument and returning it are all
547    escape sites, as are conversions between pointers and integers.
548
549 This is where we are currently limited. Since not everything is renamed
550 into SSA, we lose track of escape properties when a pointer is stashed
551 inside a field in a structure, for instance. In those cases, we are
552 assuming that the pointer does escape.
553
554 We use escape analysis to determine whether a variable is
555 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
556 is call-clobbered. If a pointer P_i escapes, then all the variables
557 pointed-to by P_i (and its memory tag) also escape.
558
559 2- Compute flow-sensitive aliases
560
561    We have two classes of memory tags. The first class consists of tags
562    associated with the pointed-to data type of the pointers in the
563    program; these are called "symbol memory tags" (SMT). The other class
564    consists of tags associated with SSA_NAMEs, called "name memory tags"
565    (NMT). The basic idea is that when adding operands for an INDIRECT_REF
566    *P_i, we first check whether P_i has a name tag; if it does, we use it,
567    because that will have more precise aliasing information. Otherwise,
568    we use the standard symbol tag.
569
570 In this phase, we go through all the pointers we found in points-to
571 analysis and create alias sets for the name memory tags associated with
572 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
573 it points to and its tag.
574
575
576 3- Compute flow-insensitive aliases
577
578 This pass will compare the alias set of every symbol memory tag and
579 every addressable variable found in the program. Given a symbol
580    memory tag SMT and an addressable variable V, if the alias sets of
581 SMT and V conflict (as computed by may_alias_p), then V is marked
582 as an alias tag and added to the alias set of SMT.
583
584 For instance, consider the following function:
585
586 foo (int i)
587 {
588 int *p, a, b;
589
590 if (i > 10)
591 p = &a;
592 else
593 p = &b;
594
595 *p = 3;
596 a = b + 2;
597 return *p;
598 }
599
600 After aliasing analysis has finished, the symbol memory tag for pointer
601 'p' will have two aliases, namely variables 'a' and 'b'. Every time
602 pointer 'p' is dereferenced, we want to mark the operation as a
603 potential reference to 'a' and 'b'.
604
605 foo (int i)
606 {
607 int *p, a, b;
608
609 if (i_2 > 10)
610 p_4 = &a;
611 else
612 p_6 = &b;
613 # p_1 = PHI <p_4(1), p_6(2)>;
614
615 # a_7 = V_MAY_DEF <a_3>;
616 # b_8 = V_MAY_DEF <b_5>;
617 *p_1 = 3;
618
619 # a_9 = V_MAY_DEF <a_7>
620 # VUSE <b_8>
621 a_9 = b_8 + 2;
622
623 # VUSE <a_9>;
624 # VUSE <b_8>;
625 return *p_1;
626 }
627
628 In certain cases, the list of may aliases for a pointer may grow too
629 large. This may cause an explosion in the number of virtual operands
630    inserted in the code, resulting in increased memory consumption and
631 compilation time.
632
633 When the number of virtual operands needed to represent aliased
634 loads and stores grows too large (configurable with @option{--param
635 max-aliased-vops}), alias sets are grouped to avoid severe
636 compile-time slow downs and memory consumption. See group_aliases. */
637
638 static unsigned int
639 compute_may_aliases (void)
640 {
641 struct alias_info *ai;
642
643 memset (&alias_stats, 0, sizeof (alias_stats));
644
645 /* Initialize aliasing information. */
646 ai = init_alias_info ();
647
648 /* For each pointer P_i, determine the sets of variables that P_i may
649 point-to. For every addressable variable V, determine whether the
650 address of V escapes the current function, making V call-clobbered
651      (i.e., whether &V is stored in a global variable or if it is passed as a
652 function call argument). */
653 compute_points_to_sets (ai);
654
655 /* Collect all pointers and addressable variables, compute alias sets,
656 create memory tags for pointers and promote variables whose address is
657 not needed anymore. */
658 setup_pointers_and_addressables (ai);
659
660 /* Compute flow-sensitive, points-to based aliasing for all the name
661 memory tags. Note that this pass needs to be done before flow
662 insensitive analysis because it uses the points-to information
663 gathered before to mark call-clobbered symbol tags. */
664 compute_flow_sensitive_aliasing (ai);
665
666 /* Compute type-based flow-insensitive aliasing for all the type
667 memory tags. */
668 compute_flow_insensitive_aliasing (ai);
669
670 /* Determine if we need to enable alias grouping. */
671 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
672 group_aliases (ai);
673
674 /* Compute call clobbering information. */
675 compute_call_clobbered (ai);
676
677 /* If the program has too many call-clobbered variables and/or function
678 calls, create .GLOBAL_VAR and use it to model call-clobbering
679 semantics at call sites. This reduces the number of virtual operands
680 considerably, improving compile times at the expense of lost
681 aliasing precision. */
682 maybe_create_global_var (ai);
683
684 /* Debugging dumps. */
685 if (dump_file)
686 {
687 dump_referenced_vars (dump_file);
688 if (dump_flags & TDF_STATS)
689 dump_alias_stats (dump_file);
690 dump_points_to_info (dump_file);
691 dump_alias_info (dump_file);
692 }
693
694 /* Deallocate memory used by aliasing data structures. */
695 delete_alias_info (ai);
696
697 updating_used_alone = true;
698 {
699 block_stmt_iterator bsi;
700 basic_block bb;
701 FOR_EACH_BB (bb)
702 {
703 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
704 {
705 update_stmt_if_modified (bsi_stmt (bsi));
706 }
707 }
708 }
709 recalculate_used_alone ();
710 updating_used_alone = false;
711 return 0;
712 }
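/* Usage note (editorial addition): the threshold that triggers alias
   grouping above can be tuned from the command line, e.g.:

     gcc -O2 --param max-aliased-vops=500 foo.c

   Lowering the value trades alias precision for shorter virtual operand
   lists and faster compiles; raising it does the opposite.  The default
   value depends on the compiler version, so this is only a sketch of the
   mechanism.  */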
713
714
715 struct tree_opt_pass pass_may_alias =
716 {
717 "alias", /* name */
718 NULL, /* gate */
719 compute_may_aliases, /* execute */
720 NULL, /* sub */
721 NULL, /* next */
722 0, /* static_pass_number */
723 TV_TREE_MAY_ALIAS, /* tv_id */
724 PROP_cfg | PROP_ssa, /* properties_required */
725 PROP_alias, /* properties_provided */
726 0, /* properties_destroyed */
727 0, /* todo_flags_start */
728 TODO_dump_func | TODO_update_ssa
729 | TODO_ggc_collect | TODO_verify_ssa
730 | TODO_verify_stmts, /* todo_flags_finish */
731 0 /* letter */
732 };
733
734
735 /* Data structure used to count the number of dereferences to PTR
736 inside an expression. */
737 struct count_ptr_d
738 {
739 tree ptr;
740 unsigned count;
741 };
742
743
744 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
745 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
746
747 static tree
748 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
749 {
750 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
751
752 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
753 pointer 'ptr' is *not* dereferenced, it is simply used to compute
754 the address of 'fld' as 'ptr + offsetof(fld)'. */
755 if (TREE_CODE (*tp) == ADDR_EXPR)
756 {
757 *walk_subtrees = 0;
758 return NULL_TREE;
759 }
760
761 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
762 count_p->count++;
763
764 return NULL_TREE;
765 }
766
767
768 /* Count the number of direct and indirect uses for pointer PTR in
769 statement STMT. The two counts are stored in *NUM_USES_P and
770 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
771 least one of those dereferences is a store operation. */
772
773 void
774 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
775 unsigned *num_derefs_p, bool *is_store)
776 {
777 ssa_op_iter i;
778 tree use;
779
780 *num_uses_p = 0;
781 *num_derefs_p = 0;
782 *is_store = false;
783
784 /* Find out the total number of uses of PTR in STMT. */
785 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
786 if (use == ptr)
787 (*num_uses_p)++;
788
789 /* Now count the number of indirect references to PTR. This is
790 truly awful, but we don't have much choice. There are no parent
791 pointers inside INDIRECT_REFs, so an expression like
792 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
793 find all the indirect and direct uses of x_1 inside. The only
794 shortcut we can take is the fact that GIMPLE only allows
795 INDIRECT_REFs inside the expressions below. */
796 if (TREE_CODE (stmt) == MODIFY_EXPR
797 || (TREE_CODE (stmt) == RETURN_EXPR
798 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
799 || TREE_CODE (stmt) == ASM_EXPR
800 || TREE_CODE (stmt) == CALL_EXPR)
801 {
802 tree lhs, rhs;
803
804 if (TREE_CODE (stmt) == MODIFY_EXPR)
805 {
806 lhs = TREE_OPERAND (stmt, 0);
807 rhs = TREE_OPERAND (stmt, 1);
808 }
809 else if (TREE_CODE (stmt) == RETURN_EXPR)
810 {
811 tree e = TREE_OPERAND (stmt, 0);
812 lhs = TREE_OPERAND (e, 0);
813 rhs = TREE_OPERAND (e, 1);
814 }
815 else if (TREE_CODE (stmt) == ASM_EXPR)
816 {
817 lhs = ASM_OUTPUTS (stmt);
818 rhs = ASM_INPUTS (stmt);
819 }
820 else
821 {
822 lhs = NULL_TREE;
823 rhs = stmt;
824 }
825
826 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
827 {
828 struct count_ptr_d count;
829 count.ptr = ptr;
830 count.count = 0;
831 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
832 *is_store = true;
833 *num_derefs_p = count.count;
834 }
835
836 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
837 {
838 struct count_ptr_d count;
839 count.ptr = ptr;
840 count.count = 0;
841 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
842 *num_derefs_p += count.count;
843 }
844 }
845
846 gcc_assert (*num_uses_p >= *num_derefs_p);
847 }
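/* Illustrative example (editorial addition, hypothetical variable names),
   using the statement from the comment above:

     *x_1 = foo (x_1, *x_1);

   count_uses_and_derefs (x_1, stmt, &uses, &derefs, &store) would report
   uses = 3 (every appearance of x_1 in the statement), derefs = 2 (one on
   the LHS and one in the argument list) and store = true, since the
   pointer is dereferenced on the left-hand side.  */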
848
849 /* Initialize the data structures used for alias analysis. */
850
851 static struct alias_info *
852 init_alias_info (void)
853 {
854 struct alias_info *ai;
855 referenced_var_iterator rvi;
856 tree var;
857
858 bitmap_obstack_initialize (&alias_obstack);
859 ai = XCNEW (struct alias_info);
860 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
861 sbitmap_zero (ai->ssa_names_visited);
862 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
863 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
864 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
865 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
866
867 /* If aliases have been computed before, clear existing information. */
868 if (aliases_computed_p)
869 {
870 unsigned i;
871
872       /* Clear the set of addressable variables.  In this
873 case, we can just clear the set because addressability is
874 only computed here. */
875 bitmap_clear (addressable_vars);
876
877 /* Clear flow-insensitive alias information from each symbol. */
878 FOR_EACH_REFERENCED_VAR (var, rvi)
879 {
880 var_ann_t ann = var_ann (var);
881
882 ann->is_aliased = 0;
883 ann->may_aliases = NULL;
884 NUM_REFERENCES_CLEAR (ann);
885
886 /* Since we are about to re-discover call-clobbered
887 variables, clear the call-clobbered flag. Variables that
888 are intrinsically call-clobbered (globals, local statics,
889 etc) will not be marked by the aliasing code, so we can't
890 remove them from CALL_CLOBBERED_VARS.
891
892 NB: STRUCT_FIELDS are still call clobbered if they are for
893 a global variable, so we *don't* clear their call clobberedness
894 just because they are tags, though we will clear it if they
895 aren't for global variables. */
896 if (TREE_CODE (var) == NAME_MEMORY_TAG
897 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
898 || !is_global_var (var))
899 clear_call_clobbered (var);
900 }
901
902 /* Clear flow-sensitive points-to information from each SSA name. */
903 for (i = 1; i < num_ssa_names; i++)
904 {
905 tree name = ssa_name (i);
906
907 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
908 continue;
909
910 if (SSA_NAME_PTR_INFO (name))
911 {
912 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
913
914 /* Clear all the flags but keep the name tag to
915 avoid creating new temporaries unnecessarily. If
916 this pointer is found to point to a subset or
917 superset of its former points-to set, then a new
918 tag will need to be created in create_name_tags. */
919 pi->pt_anything = 0;
920 pi->pt_null = 0;
921 pi->value_escapes_p = 0;
922 pi->is_dereferenced = 0;
923 if (pi->pt_vars)
924 bitmap_clear (pi->pt_vars);
925 }
926 }
927 }
928
929 /* Next time, we will need to reset alias information. */
930 aliases_computed_p = true;
931
932 return ai;
933 }
934
935
936 /* Deallocate memory used by alias analysis. */
937
938 static void
939 delete_alias_info (struct alias_info *ai)
940 {
941 size_t i;
942 referenced_var_iterator rvi;
943 tree var;
944
945 sbitmap_free (ai->ssa_names_visited);
946 VEC_free (tree, heap, ai->processed_ptrs);
947
948 for (i = 0; i < ai->num_addressable_vars; i++)
949 free (ai->addressable_vars[i]);
950
951 FOR_EACH_REFERENCED_VAR(var, rvi)
952 {
953 var_ann_t ann = var_ann (var);
954 NUM_REFERENCES_CLEAR (ann);
955 }
956
957 free (ai->addressable_vars);
958
959 for (i = 0; i < ai->num_pointers; i++)
960 free (ai->pointers[i]);
961 free (ai->pointers);
962
963 BITMAP_FREE (ai->written_vars);
964 BITMAP_FREE (ai->dereferenced_ptrs_store);
965 BITMAP_FREE (ai->dereferenced_ptrs_load);
966 bitmap_obstack_release (&alias_obstack);
967 free (ai);
968
969 delete_points_to_sets ();
970 }
971
972 /* Create name tags for all the pointers that have been dereferenced.
973 We only create a name tag for a pointer P if P is found to point to
974 a set of variables (so that we can alias them to *P) or if it is
975 the result of a call to malloc (which means that P cannot point to
976 anything else nor alias any other variable).
977
978 If two pointers P and Q point to the same set of variables, they
979 are assigned the same name tag. */
980
981 static void
982 create_name_tags (void)
983 {
984 size_t i;
985 VEC (tree, heap) *with_ptvars = NULL;
986 tree ptr;
987
988   /* Collect the list of pointers with a non-empty points-to set. */
989 for (i = 1; i < num_ssa_names; i++)
990 {
991 tree ptr = ssa_name (i);
992 struct ptr_info_def *pi;
993
994 if (!ptr
995 || !POINTER_TYPE_P (TREE_TYPE (ptr))
996 || !SSA_NAME_PTR_INFO (ptr))
997 continue;
998
999 pi = SSA_NAME_PTR_INFO (ptr);
1000
1001 if (pi->pt_anything || !pi->is_dereferenced)
1002 {
1003 /* No name tags for pointers that have not been
1004 dereferenced or point to an arbitrary location. */
1005 pi->name_mem_tag = NULL_TREE;
1006 continue;
1007 }
1008
1009 /* Set pt_anything on the pointers without pt_vars filled in so
1010 that they are assigned a symbol tag. */
1011 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
1012 VEC_safe_push (tree, heap, with_ptvars, ptr);
1013 else
1014 set_pt_anything (ptr);
1015 }
1016
1017 /* If we didn't find any pointers with pt_vars set, we're done. */
1018 if (!with_ptvars)
1019 return;
1020
1021 /* Now go through the pointers with pt_vars, and find a name tag
1022 with the same pt_vars as this pointer, or create one if one
1023 doesn't exist. */
1024 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
1025 {
1026 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1027 size_t j;
1028 tree ptr2;
1029 tree old_name_tag = pi->name_mem_tag;
1030
1031 /* If PTR points to a set of variables, check if we don't
1032 have another pointer Q with the same points-to set before
1033 creating a tag. If so, use Q's tag instead of creating a
1034 new one.
1035
1036 This is important for not creating unnecessary symbols
1037 and also for copy propagation. If we ever need to
1038 propagate PTR into Q or vice-versa, we would run into
1039 problems if they both had different name tags because
1040 they would have different SSA version numbers (which
1041 would force us to take the name tags in and out of SSA). */
1042 for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
1043 {
1044 struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);
1045
1046 if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
1047 {
1048 pi->name_mem_tag = qi->name_mem_tag;
1049 break;
1050 }
1051 }
1052
1053 /* If we didn't find a pointer with the same points-to set
1054 as PTR, create a new name tag if needed. */
1055 if (pi->name_mem_tag == NULL_TREE)
1056 pi->name_mem_tag = get_nmt_for (ptr);
1057
1058 /* If the new name tag computed for PTR is different than
1059 the old name tag that it used to have, then the old tag
1060 needs to be removed from the IL, so we mark it for
1061 renaming. */
1062 if (old_name_tag && old_name_tag != pi->name_mem_tag)
1063 mark_sym_for_renaming (old_name_tag);
1064
1065 TREE_THIS_VOLATILE (pi->name_mem_tag)
1066 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
1067
1068 /* Mark the new name tag for renaming. */
1069 mark_sym_for_renaming (pi->name_mem_tag);
1070 }
1071
1072 VEC_free (tree, heap, with_ptvars);
1073 }
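/* Illustrative example (editorial addition, hypothetical SSA names): if
   points-to analysis computed

     pt_vars (p_1) = { a, b }
     pt_vars (q_3) = { a, b }

   then the loop above gives p_1 and q_3 the same name memory tag, say
   NMT.8, so that propagating p_1 into q_3 (or vice versa) never has to
   rewrite name tags.  A pointer with pt_anything set, or one that is never
   dereferenced, gets no name tag at all and falls back to its symbol
   memory tag.  */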
1074
1075
1076 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1077 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1078 name tag and the variables it points-to are call-clobbered. Finally, if
1079 P_i escapes and we could not determine where it points to, then all the
1080 variables in the same alias set as *P_i are marked call-clobbered. This
1081 is necessary because we must assume that P_i may take the address of any
1082 variable in the same alias set. */
1083
1084 static void
1085 compute_flow_sensitive_aliasing (struct alias_info *ai)
1086 {
1087 size_t i;
1088 tree ptr;
1089
1090 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1091 {
1092 if (!find_what_p_points_to (ptr))
1093 set_pt_anything (ptr);
1094 }
1095
1096 create_name_tags ();
1097
1098 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1099 {
1100 unsigned j;
1101 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1102 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
1103 bitmap_iterator bi;
1104
1105
1106 /* Set up aliasing information for PTR's name memory tag (if it has
1107 one). Note that only pointers that have been dereferenced will
1108 have a name memory tag. */
1109 if (pi->name_mem_tag && pi->pt_vars)
1110 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
1111 {
1112 add_may_alias (pi->name_mem_tag, referenced_var (j));
1113 add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
1114 }
1115 }
1116 }
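/* Illustrative example (editorial addition, continuing the hypothetical
   names used above): if pt_vars (p_1) = { a, b } and p_1 has name tag
   NMT.8, the loop just executed adds

     may-aliases (NMT.8)  = { a, b }
     may-aliases (SMT(p)) = { a, b }

   so that a dereference of p_1 produces virtual operands for both 'a' and
   'b', e.g.:

     # a_9 = V_MAY_DEF <a_8>
     # b_10 = V_MAY_DEF <b_7>
     *p_1 = 0;  */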
1117
1118
1119 /* Compute type-based alias sets. Traverse all the pointers and
1120 addressable variables found in setup_pointers_and_addressables.
1121
1122 For every pointer P in AI->POINTERS and addressable variable V in
1123 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1124 memory tag (SMT) if their alias sets conflict. V is then marked as
1125 an alias tag so that the operand scanner knows that statements
1126 containing V have aliased operands. */
1127
1128 static void
1129 compute_flow_insensitive_aliasing (struct alias_info *ai)
1130 {
1131 size_t i;
1132
1133 /* Initialize counter for the total number of virtual operands that
1134 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1135      threshold set by --param max-aliased-vops, we enable alias
1136 grouping. */
1137 ai->total_alias_vops = 0;
1138
1139 /* For every pointer P, determine which addressable variables may alias
1140 with P's symbol memory tag. */
1141 for (i = 0; i < ai->num_pointers; i++)
1142 {
1143 size_t j;
1144 struct alias_map_d *p_map = ai->pointers[i];
1145 tree tag = var_ann (p_map->var)->symbol_mem_tag;
1146 var_ann_t tag_ann = var_ann (tag);
1147
1148 p_map->total_alias_vops = 0;
1149 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1150
1151 for (j = 0; j < ai->num_addressable_vars; j++)
1152 {
1153 struct alias_map_d *v_map;
1154 var_ann_t v_ann;
1155 tree var;
1156 bool tag_stored_p, var_stored_p;
1157
1158 v_map = ai->addressable_vars[j];
1159 var = v_map->var;
1160 v_ann = var_ann (var);
1161
1162 /* Skip memory tags and variables that have never been
1163 written to. We also need to check if the variables are
1164 call-clobbered because they may be overwritten by
1165 function calls.
1166
1167 	     Note that this effectively accesses random elements of
1168 	     the sparse bitmap, which can be highly inefficient, so
1169 	     we first check the call_clobbered status of the
1170 tag and variable before querying the bitmap. */
1171 tag_stored_p = is_call_clobbered (tag)
1172 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1173 var_stored_p = is_call_clobbered (var)
1174 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1175 if (!tag_stored_p && !var_stored_p)
1176 continue;
1177
1178 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1179 {
1180 size_t num_tag_refs, num_var_refs;
1181
1182 num_tag_refs = NUM_REFERENCES (tag_ann);
1183 num_var_refs = NUM_REFERENCES (v_ann);
1184
1185 /* Add VAR to TAG's may-aliases set. */
1186
1187 /* We should never have a var with subvars here, because
1188 they shouldn't get into the set of addressable vars */
1189 gcc_assert (!var_can_have_subvars (var)
1190 || get_subvars_for_var (var) == NULL);
1191
1192 add_may_alias (tag, var);
1193 /* Update the bitmap used to represent TAG's alias set
1194 in case we need to group aliases. */
1195 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1196
1197 /* Update the total number of virtual operands due to
1198 aliasing. Since we are adding one more alias to TAG's
1199 may-aliases set, the total number of virtual operands due
1200 to aliasing will be increased by the number of references
1201 made to VAR and TAG (every reference to TAG will also
1202 count as a reference to VAR). */
1203 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1204 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1205
1206
1207 }
1208 }
1209 }
1210
1211 /* Since this analysis is based exclusively on symbols, it fails to
1212 handle cases where two pointers P and Q have different memory
1213 tags with conflicting alias set numbers but no aliased symbols in
1214 common.
1215
1216 For example, suppose that we have two memory tags SMT.1 and SMT.2
1217 such that
1218
1219 may-aliases (SMT.1) = { a }
1220 may-aliases (SMT.2) = { b }
1221
1222 and the alias set number of SMT.1 conflicts with that of SMT.2.
1223 Since they don't have symbols in common, loads and stores from
1224 SMT.1 and SMT.2 will seem independent of each other, which will
1225 lead to the optimizers making invalid transformations (see
1226 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1227
1228 To avoid this problem, we do a final traversal of AI->POINTERS
1229 looking for pairs of pointers that have no aliased symbols in
1230 common and yet have conflicting alias set numbers. */
1231 for (i = 0; i < ai->num_pointers; i++)
1232 {
1233 size_t j;
1234 struct alias_map_d *p_map1 = ai->pointers[i];
1235 tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1236 bitmap may_aliases1 = p_map1->may_aliases;
1237
1238 for (j = i + 1; j < ai->num_pointers; j++)
1239 {
1240 struct alias_map_d *p_map2 = ai->pointers[j];
1241 tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1242 bitmap may_aliases2 = p_map2->may_aliases;
1243
1244 /* If the pointers may not point to each other, do nothing. */
1245 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1246 continue;
1247
1248 /* The two pointers may alias each other. If they already have
1249 symbols in common, do nothing. */
1250 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1251 continue;
1252
1253 if (!bitmap_empty_p (may_aliases2))
1254 {
1255 unsigned int k;
1256 bitmap_iterator bi;
1257
1258 /* Add all the aliases for TAG2 into TAG1's alias set.
1259 FIXME, update grouping heuristic counters. */
1260 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1261 add_may_alias (tag1, referenced_var (k));
1262 bitmap_ior_into (may_aliases1, may_aliases2);
1263 }
1264 else
1265 {
1266 /* Since TAG2 does not have any aliases of its own, add
1267 TAG2 itself to the alias set of TAG1. */
1268 add_may_alias (tag1, tag2);
1269 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1270 }
1271 }
1272 }
1273
1274 if (dump_file)
1275 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1276 get_name (current_function_decl),
1277 ai->total_alias_vops);
1278 }
1279
1280
1281 /* Comparison function for qsort used in group_aliases. */
1282
1283 static int
1284 total_alias_vops_cmp (const void *p, const void *q)
1285 {
1286 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1287 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1288 long n1 = (*p1)->total_alias_vops;
1289 long n2 = (*p2)->total_alias_vops;
1290
1291 /* We want to sort in descending order. */
1292 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1293 }
1294
1295 /* Group all the aliases for TAG to make TAG represent all the
1296 variables in its alias set. Update the total number
1297 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1298 function will make TAG be the unique alias tag for all the
1299 variables in its may-aliases. So, given:
1300
1301 may-aliases(TAG) = { V1, V2, V3 }
1302
1303 This function will group the variables into:
1304
1305 may-aliases(V1) = { TAG }
1306 may-aliases(V2) = { TAG }
1307 	may-aliases(V3) = { TAG } */
1308
1309 static void
1310 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1311 {
1312 unsigned int i;
1313 var_ann_t tag_ann = var_ann (tag);
1314 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1315 bitmap_iterator bi;
1316
1317 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1318 {
1319 tree var = referenced_var (i);
1320 var_ann_t ann = var_ann (var);
1321
1322 /* Make TAG the unique alias of VAR. */
1323 ann->is_aliased = 0;
1324 ann->may_aliases = NULL;
1325
1326 /* Note that VAR and TAG may be the same if the function has no
1327 addressable variables (see the discussion at the end of
1328 setup_pointers_and_addressables). */
1329 if (var != tag)
1330 add_may_alias (var, tag);
1331
1332 /* Reduce total number of virtual operands contributed
1333 by TAG on behalf of VAR. Notice that the references to VAR
1334 itself won't be removed. We will merely replace them with
1335 references to TAG. */
1336 ai->total_alias_vops -= num_tag_refs;
1337 }
1338
1339 /* We have reduced the number of virtual operands that TAG makes on
1340 behalf of all the variables formerly aliased with it. However,
1341 we have also "removed" all the virtual operands for TAG itself,
1342 so we add them back. */
1343 ai->total_alias_vops += num_tag_refs;
1344
1345 /* TAG no longer has any aliases. */
1346 tag_ann->may_aliases = NULL;
1347 }
1348
1349
1350 /* Group may-aliases sets to reduce the number of virtual operands due
1351 to aliasing.
1352
1353 1- Sort the list of pointers in decreasing number of contributed
1354 virtual operands.
1355
1356    2- Take the first entry in AI->POINTERS and reverse the roles of
1357 the memory tag and its aliases. Usually, whenever an aliased
1358 variable Vi is found to alias with a memory tag T, we add Vi
1359 to the may-aliases set for T. Meaning that after alias
1360 analysis, we will have:
1361
1362 may-aliases(T) = { V1, V2, V3, ..., Vn }
1363
1364 This means that every statement that references T, will get 'n'
1365 virtual operands for each of the Vi tags. But, when alias
1366 grouping is enabled, we make T an alias tag and add it to the
1367 alias set of all the Vi variables:
1368
1369 may-aliases(V1) = { T }
1370 may-aliases(V2) = { T }
1371 ...
1372 may-aliases(Vn) = { T }
1373
1374 This has two effects: (a) statements referencing T will only get
1375 a single virtual operand, and, (b) all the variables Vi will now
1376 appear to alias each other. So, we lose alias precision to
1377 improve compile time. But, in theory, a program with such a high
1378 level of aliasing should not be very optimizable in the first
1379 place.
1380
1381 3- Since variables may be in the alias set of more than one
1382 memory tag, the grouping done in step (2) needs to be extended
1383 to all the memory tags that have a non-empty intersection with
1384 the may-aliases set of tag T. For instance, if we originally
1385 had these may-aliases sets:
1386
1387 may-aliases(T) = { V1, V2, V3 }
1388 may-aliases(R) = { V2, V4 }
1389
1390       In step (2) we would have reversed the aliases for T as:
1391
1392 may-aliases(V1) = { T }
1393 may-aliases(V2) = { T }
1394 may-aliases(V3) = { T }
1395
1396 But note that now V2 is no longer aliased with R. We could
1397 add R to may-aliases(V2), but we are in the process of
1398 grouping aliases to reduce virtual operands so what we do is
1399 add V4 to the grouping to obtain:
1400
1401 may-aliases(V1) = { T }
1402 may-aliases(V2) = { T }
1403 may-aliases(V3) = { T }
1404 may-aliases(V4) = { T }
1405
1406 4- If the total number of virtual operands due to aliasing is
1407       still above the threshold set by max-aliased-vops, go back to (2). */
1408
1409 static void
1410 group_aliases (struct alias_info *ai)
1411 {
1412 size_t i;
1413 tree ptr;
1414
1415 /* Sort the POINTERS array in descending order of contributed
1416 virtual operands. */
1417 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1418 total_alias_vops_cmp);
1419
1420 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1421 and the tag's may-aliases set. */
1422 for (i = 0; i < ai->num_pointers; i++)
1423 {
1424 size_t j;
1425 tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
1426 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1427
1428 /* Skip tags that have been grouped already. */
1429 if (ai->pointers[i]->grouped_p)
1430 continue;
1431
1432 /* See if TAG1 had any aliases in common with other symbol tags.
1433 If we find a TAG2 with common aliases with TAG1, add TAG2's
1434 aliases into TAG1. */
1435 for (j = i + 1; j < ai->num_pointers; j++)
1436 {
1437 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1438
1439 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1440 {
1441 tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
1442
1443 bitmap_ior_into (tag1_aliases, tag2_aliases);
1444
1445 /* TAG2 does not need its aliases anymore. */
1446 bitmap_clear (tag2_aliases);
1447 var_ann (tag2)->may_aliases = NULL;
1448
1449 /* TAG1 is the unique alias of TAG2. */
1450 add_may_alias (tag2, tag1);
1451
1452 ai->pointers[j]->grouped_p = true;
1453 }
1454 }
1455
1456 /* Now group all the aliases we collected into TAG1. */
1457 group_aliases_into (tag1, tag1_aliases, ai);
1458
1459 /* If we've reduced total number of virtual operands below the
1460 threshold, stop. */
1461 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1462 break;
1463 }
1464
1465 /* Finally, all the variables that have been grouped cannot be in
1466 the may-alias set of name memory tags. Suppose that we have
1467 grouped the aliases in this code so that may-aliases(a) = SMT.20
1468
1469 p_5 = &a;
1470 ...
1471 # a_9 = V_MAY_DEF <a_8>
1472 p_5->field = 0
1473 ... Several modifications to SMT.20 ...
1474 # VUSE <a_9>
1475 x_30 = p_5->field
1476
1477 Since p_5 points to 'a', the optimizers will try to propagate 0
1478 into p_5->field, but that is wrong because there have been
1479 modifications to 'SMT.20' in between. To prevent this we have to
1480 replace 'a' with 'SMT.20' in the name tag of p_5. */
1481 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1482 {
1483 size_t j;
1484 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1485 VEC(tree,gc) *aliases;
1486 tree alias;
1487
1488 if (name_tag == NULL_TREE)
1489 continue;
1490
1491 aliases = var_ann (name_tag)->may_aliases;
1492 for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1493 {
1494 var_ann_t ann = var_ann (alias);
1495
1496 if ((!MTAG_P (alias)
1497 || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1498 && ann->may_aliases)
1499 {
1500 tree new_alias;
1501
1502 gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1503
1504 new_alias = VEC_index (tree, ann->may_aliases, 0);
1505 replace_may_alias (name_tag, j, new_alias);
1506 }
1507 }
1508 }
1509
1510 if (dump_file)
1511 fprintf (dump_file,
1512 "%s: Total number of aliased vops after grouping: %ld%s\n",
1513 get_name (current_function_decl),
1514 ai->total_alias_vops,
1515 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1516 }
1517
1518
1519 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1520
1521 static void
1522 create_alias_map_for (tree var, struct alias_info *ai)
1523 {
1524 struct alias_map_d *alias_map;
1525 alias_map = XCNEW (struct alias_map_d);
1526 alias_map->var = var;
1527 alias_map->set = get_alias_set (var);
1528 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1529 }
1530
1531
1532 /* Create memory tags for all the dereferenced pointers and build the
1533 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1534 sets. Based on the address escape and points-to information collected
1535 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1536 variables whose address is not needed anymore. */
1537
1538 static void
1539 setup_pointers_and_addressables (struct alias_info *ai)
1540 {
1541 size_t n_vars, num_addressable_vars, num_pointers;
1542 referenced_var_iterator rvi;
1543 tree var;
1544 VEC (tree, heap) *varvec = NULL;
1545 safe_referenced_var_iterator srvi;
1546
1547 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1548 num_addressable_vars = num_pointers = 0;
1549
1550 FOR_EACH_REFERENCED_VAR (var, rvi)
1551 {
1552 if (may_be_aliased (var))
1553 num_addressable_vars++;
1554
1555 if (POINTER_TYPE_P (TREE_TYPE (var)))
1556 {
1557 /* Since we don't keep track of volatile variables, assume that
1558 these pointers are used in indirect store operations. */
1559 if (TREE_THIS_VOLATILE (var))
1560 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1561
1562 num_pointers++;
1563 }
1564 }
1565
1566 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1567      always going to be slightly bigger than we actually need
1568 because some TREE_ADDRESSABLE variables will be marked
1569 non-addressable below and only pointers with unique symbol tags are
1570 going to be added to POINTERS. */
1571 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1572 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1573 ai->num_addressable_vars = 0;
1574 ai->num_pointers = 0;
1575
1576 /* Since we will be creating symbol memory tags within this loop,
1577 cache the value of NUM_REFERENCED_VARS to avoid processing the
1578 additional tags unnecessarily. */
1579 n_vars = num_referenced_vars;
1580
1581 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1582 {
1583 var_ann_t v_ann = var_ann (var);
1584 subvar_t svars;
1585
1586 /* Name memory tags already have flow-sensitive aliasing
1587 information, so they need not be processed by
1588 compute_flow_insensitive_aliasing. Similarly, symbol memory
1589 tags are already accounted for when we process their
1590 associated pointer.
1591
1592 Structure fields, on the other hand, have to have some of this
1593 information processed for them, but it's pointless to mark them
1594 non-addressable (since they are fake variables anyway). */
1595 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1596 continue;
1597
1598 /* Remove the ADDRESSABLE flag from every addressable variable whose
1599 address is not needed anymore. This is caused by the propagation
1600 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1601 removal of dead pointer assignments done by the early scalar
1602 cleanup passes. */
1603 if (TREE_ADDRESSABLE (var))
1604 {
1605 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1606 && TREE_CODE (var) != RESULT_DECL
1607 && !is_global_var (var))
1608 {
1609 bool okay_to_mark = true;
1610
1611 /* Since VAR is now a regular GIMPLE register, we will need
1612 to rename VAR into SSA afterwards. */
1613 mark_sym_for_renaming (var);
1614
1615 /* If VAR can have sub-variables, and any of its
1616 sub-variables has its address taken, then we cannot
1617 remove the addressable flag from VAR. */
1618 if (var_can_have_subvars (var)
1619 && (svars = get_subvars_for_var (var)))
1620 {
1621 subvar_t sv;
1622
1623 for (sv = svars; sv; sv = sv->next)
1624 {
1625 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1626 okay_to_mark = false;
1627 mark_sym_for_renaming (sv->var);
1628 }
1629 }
1630
1631 /* The address of VAR is not needed, remove the
1632 addressable bit, so that it can be optimized as a
1633 regular variable. */
1634 if (okay_to_mark)
1635 mark_non_addressable (var);
1636 }
1637 }
1638
1639 /* Global variables and addressable locals may be aliased. Create an
1640 entry in ADDRESSABLE_VARS for VAR. */
1641 if (may_be_aliased (var)
1642 && (!var_can_have_subvars (var)
1643 || get_subvars_for_var (var) == NULL))
1644 {
1645 create_alias_map_for (var, ai);
1646 mark_sym_for_renaming (var);
1647 }
1648
1649 /* Add pointer variables that have been dereferenced to the POINTERS
1650 array and create a symbol memory tag for them. */
1651 if (POINTER_TYPE_P (TREE_TYPE (var)))
1652 {
1653 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1654 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1655 {
1656 tree tag;
1657 var_ann_t t_ann;
1658
1659 /* If pointer VAR still doesn't have a memory tag
1660 associated with it, create it now or re-use an
1661 existing one. */
1662 tag = get_tmt_for (var, ai);
1663 t_ann = var_ann (tag);
1664
1665 /* The symbol tag will need to be renamed into SSA
1666 afterwards. Note that we cannot do this inside
1667 get_tmt_for because aliasing may run multiple times
1668 and we only create symbol tags the first time. */
1669 mark_sym_for_renaming (tag);
1670
1671 /* Similarly, if pointer VAR used to have another type
1672 tag, we will need to process it in the renamer to
1673 remove the stale virtual operands. */
1674 if (v_ann->symbol_mem_tag)
1675 mark_sym_for_renaming (v_ann->symbol_mem_tag);
1676
1677 /* Associate the tag with pointer VAR. */
1678 v_ann->symbol_mem_tag = tag;
1679
1680 /* If pointer VAR has been used in a store operation,
1681 then its memory tag must be marked as written-to. */
1682 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1683 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1684
1685 /* All the dereferences of pointer VAR count as
1686 references of TAG. Since TAG can be associated with
1687 several pointers, add the dereferences of VAR to the
1688 TAG. */
1689 NUM_REFERENCES_SET (t_ann,
1690 NUM_REFERENCES (t_ann)
1691 + NUM_REFERENCES (v_ann));
1692 }
1693 else
1694 {
1695 /* The pointer has not been dereferenced. If it had a
1696 symbol memory tag, remove it and mark the old tag for
1697 	     renaming to remove it from the IL. */
1698 var_ann_t ann = var_ann (var);
1699 tree tag = ann->symbol_mem_tag;
1700 if (tag)
1701 {
1702 mark_sym_for_renaming (tag);
1703 ann->symbol_mem_tag = NULL_TREE;
1704 }
1705 }
1706 }
1707 }
1708 VEC_free (tree, heap, varvec);
1709 }
1710
1711
1712 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1713 every call site, we need to emit V_MAY_DEF expressions to represent the
1714 clobbering effects of the call for variables whose address escapes the
1715 current function.
1716
1717 One approach is to group all call-clobbered variables into a single
1718 representative that is used as an alias of every call-clobbered variable
1719    (.GLOBAL_VAR).  This works well, but it ties the optimizer's hands because
1720    a reference to any call-clobbered variable becomes a reference to .GLOBAL_VAR.
1721
1722 The second approach is to emit a clobbering V_MAY_DEF for every
1723 call-clobbered variable at call sites. This is the preferred way in terms
1724 of optimization opportunities but it may create too many V_MAY_DEF operands
1725 if there are many call clobbered variables and function calls in the
1726 function.
1727
1728 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1729 function calls found by the number of call-clobbered variables. If that
1730 product is beyond a certain threshold, as determined by the parameterized
1731 values shown below, we use .GLOBAL_VAR.
1732
1733 FIXME. This heuristic should be improved. One idea is to use several
1734 .GLOBAL_VARs of different types instead of a single one. The thresholds
1735 have been derived from a typical bootstrap cycle, including all target
1736 libraries. Compile times were found to increase by ~1% compared to using
1737 .GLOBAL_VAR. */
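
/* Illustrative sketch of the trade-off above (the figures are purely
   hypothetical; the real cutoff is the parameterized GLOBAL_VAR_THRESHOLD
   value used below):

     50 call sites * 200 call-clobbered variables
       = 10000 clobbering V_MAY_DEF operands

   If that product reaches the threshold, maybe_create_global_var falls
   back to the single .GLOBAL_VAR representative instead of emitting one
   V_MAY_DEF per call-clobbered variable at every call site.  */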
1738
1739 static void
1740 maybe_create_global_var (struct alias_info *ai)
1741 {
1742 unsigned i, n_clobbered;
1743 bitmap_iterator bi;
1744
1745 /* No need to create it, if we have one already. */
1746 if (global_var == NULL_TREE)
1747 {
1748 /* Count all the call-clobbered variables. */
1749 n_clobbered = 0;
1750 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1751 {
1752 n_clobbered++;
1753 }
1754
1755 /* If the number of virtual operands that would be needed to
1756 model all the call-clobbered variables is larger than
1757 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1758
1759 Also create .GLOBAL_VAR if there are no call-clobbered
1760 variables and the program contains a mixture of pure/const
1761 and regular function calls. This is to avoid the problem
1762 described in PR 20115:
1763
1764 int X;
1765 int func_pure (void) { return X; }
1766 int func_non_pure (int a) { X += a; }
1767 int foo ()
1768 {
1769 int a = func_pure ();
1770 func_non_pure (a);
1771 a = func_pure ();
1772 return a;
1773 }
1774
1775 Since foo() has no call-clobbered variables, there is
1776 no relationship between the calls to func_pure and
1777 func_non_pure. Since func_pure has no side-effects, value
1778 numbering optimizations elide the second call to func_pure.
1779 So, if we have some pure/const and some regular calls in the
1780 program we create .GLOBAL_VAR to avoid missing these
1781 relations. */
1782 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1783 || (n_clobbered == 0
1784 && ai->num_calls_found > 0
1785 && ai->num_pure_const_calls_found > 0
1786 && ai->num_calls_found > ai->num_pure_const_calls_found))
1787 create_global_var ();
1788 }
1789
1790 /* Mark all call-clobbered symbols for renaming. Since the initial
1791 rewrite into SSA ignored all call sites, we may need to rename
1792 .GLOBAL_VAR and the call-clobbered variables. */
1793 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1794 {
1795 tree var = referenced_var (i);
1796
1797 /* If the function has calls to clobbering functions and
1798 .GLOBAL_VAR has been created, make it an alias for all
1799 call-clobbered variables. */
1800 if (global_var && var != global_var)
1801 {
1802 add_may_alias (var, global_var);
1803 gcc_assert (!get_subvars_for_var (var));
1804 }
1805
1806 mark_sym_for_renaming (var);
1807 }
1808 }
1809
1810
1811 /* Return TRUE if pointer PTR may point to variable VAR.
1812
1813 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
1814 This is needed because when checking for type conflicts we are
1815 interested in the alias set of the memory location pointed-to by
1816 PTR. The alias set of PTR itself is irrelevant.
1817
1818 VAR_ALIAS_SET is the alias set for VAR. */
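
/* As a hypothetical example of the kind of query this answers (the
   declarations below are illustrative only): with

     double d;
     void f (int *p) { *p = 1; }

   the alias set of the memory pointed-to by P (that of 'int') does not
   conflict with the alias set of D (that of 'double') under strict
   aliasing, so may_alias_p returns false for that pair and no may-alias
   edge between P's memory tag and D is needed.  */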
1819
1820 static bool
1821 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1822 tree var, HOST_WIDE_INT var_alias_set,
1823 bool alias_set_only)
1824 {
1825 tree mem;
1826
1827 alias_stats.alias_queries++;
1828 alias_stats.simple_queries++;
1829
1830 /* By convention, a variable cannot alias itself. */
1831 mem = var_ann (ptr)->symbol_mem_tag;
1832 if (mem == var)
1833 {
1834 alias_stats.alias_noalias++;
1835 alias_stats.simple_resolved++;
1836 return false;
1837 }
1838
1839 /* If flag_argument_noalias is greater than 2, pointer arguments
1840 may not point to anything else. */
1841 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
1842 {
1843 alias_stats.alias_noalias++;
1844 alias_stats.simple_resolved++;
1845 return false;
1846 }
1847
1848 /* If flag_argument_noalias is greater than 1, pointer arguments
1849 may not point to global variables. */
1850 if (flag_argument_noalias > 1 && is_global_var (var)
1851 && TREE_CODE (ptr) == PARM_DECL)
1852 {
1853 alias_stats.alias_noalias++;
1854 alias_stats.simple_resolved++;
1855 return false;
1856 }
1857
1858 /* If either MEM or VAR is a read-only global and the other one
1859 isn't, then PTR cannot point to VAR. */
1860 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1861 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1862 {
1863 alias_stats.alias_noalias++;
1864 alias_stats.simple_resolved++;
1865 return false;
1866 }
1867
1868 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
1869
1870 alias_stats.tbaa_queries++;
1871
1872 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1873 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1874 {
1875 alias_stats.alias_noalias++;
1876 alias_stats.tbaa_resolved++;
1877 return false;
1878 }
1879
1880 /* If var is a record or union type, ptr cannot point into var
1881 unless there is some explicit address operation in the
1882 program that can reference a field of the ptr's dereferenced
1883 type. This also assumes that the types of both var and ptr are
1884 contained within the compilation unit, and that there is no fancy
1885 addressing arithmetic associated with any of the types
1886 involved. */
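
/* Rough illustration of the check below (hypothetical declarations):
   given

     struct S { int f; };
     struct S s;
     int *p;

   VAR_TYPE is 'struct S' (an interesting record type) and stripping
   the '*' off P's type yields 'int' with a star count of 1.  If the
   IPA type-escape analysis can show that no field of 'struct S' is
   ever exposed through a pointer of P's type, the dereference cannot
   clobber S and we answer "no alias" without any further checks.  */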
1887
1888 if ((mem_alias_set != 0) && (var_alias_set != 0))
1889 {
1890 tree ptr_type = TREE_TYPE (ptr);
1891 tree var_type = TREE_TYPE (var);
1892
1893 /* The star count is -1 if the type at the end of the pointer_to
1894 chain is not a record or union type. */
1895 if ((!alias_set_only) &&
1896 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1897 {
1898 int ptr_star_count = 0;
1899
1900 /* Ipa_type_escape_star_count_of_interesting_type is a little too
1901 restrictive for the pointer type; we need to allow pointers to
1902 primitive types as long as those types cannot be pointers
1903 to everything. */
1904 while (POINTER_TYPE_P (ptr_type))
1905 /* Strip the *'s off. */
1906 {
1907 ptr_type = TREE_TYPE (ptr_type);
1908 ptr_star_count++;
1909 }
1910
1911 /* There does not appear to be a better test to see if the
1912 pointer type was one of the pointer to everything
1913 types. */
1914
1915 if (ptr_star_count > 0)
1916 {
1917 alias_stats.structnoaddress_queries++;
1918 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1919 TREE_TYPE (ptr)))
1920 {
1921 alias_stats.structnoaddress_resolved++;
1922 alias_stats.alias_noalias++;
1923 return false;
1924 }
1925 }
1926 else if (ptr_star_count == 0)
1927 {
1928 /* If ptr_type was not really a pointer type, it cannot
1929 alias. */
1930 alias_stats.structnoaddress_queries++;
1931 alias_stats.structnoaddress_resolved++;
1932 alias_stats.alias_noalias++;
1933 return false;
1934 }
1935 }
1936 }
1937
1938 alias_stats.alias_mayalias++;
1939 return true;
1940 }
1941
1942
1943 /* Add ALIAS to the set of variables that may alias VAR. */
1944
1945 static void
1946 add_may_alias (tree var, tree alias)
1947 {
1948 size_t i;
1949 var_ann_t v_ann = get_var_ann (var);
1950 var_ann_t a_ann = get_var_ann (alias);
1951 tree al;
1952
1953 /* Don't allow self-referential aliases. */
1954 gcc_assert (var != alias);
1955
1956 /* ALIAS must be addressable if it's being added to an alias set. */
1957 #if 1
1958 TREE_ADDRESSABLE (alias) = 1;
1959 #else
1960 gcc_assert (may_be_aliased (alias));
1961 #endif
1962
1963 if (v_ann->may_aliases == NULL)
1964 v_ann->may_aliases = VEC_alloc (tree, gc, 2);
1965
1966 /* Avoid adding duplicates. */
1967 for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
1968 if (alias == al)
1969 return;
1970
1971 VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
1972 a_ann->is_aliased = 1;
1973 }
1974
1975
1976 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1977
1978 static void
1979 replace_may_alias (tree var, size_t i, tree new_alias)
1980 {
1981 var_ann_t v_ann = var_ann (var);
1982 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
1983 }
1984
1985
1986 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1987
1988 static void
1989 set_pt_anything (tree ptr)
1990 {
1991 struct ptr_info_def *pi = get_ptr_info (ptr);
1992
1993 pi->pt_anything = 1;
1994 pi->pt_vars = NULL;
1995
1996 /* The pointer used to have a name tag, but we now found it pointing
1997 to an arbitrary location. The name tag needs to be renamed and
1998 disassociated from PTR. */
1999 if (pi->name_mem_tag)
2000 {
2001 mark_sym_for_renaming (pi->name_mem_tag);
2002 pi->name_mem_tag = NULL_TREE;
2003 }
2004 }
2005
2006
2007 /* Return true if STMT is an "escape" site from the current function. Escape
2008 sites are those statements that might expose the address of a variable
2009 outside the current function. STMT is an escape site iff:
2010
2011 1- STMT is a function call, or
2012 2- STMT is an __asm__ expression, or
2013 3- STMT is an assignment to a non-local variable, or
2014 4- STMT is a return statement.
2015
2016 AI points to the alias information collected so far.
2017
2018 Return the type of escape site found, if we found one, or NO_ESCAPE
2019 if none. */
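
/* Hypothetical statements illustrating each classification (assuming P
   is a local pointer and G is a global):

     foo (p);                    ESCAPE_TO_CALL (ESCAPE_TO_PURE_CONST if
                                 foo is pure or const)
     __asm__ ("" : : "r" (p));   ESCAPE_TO_ASM
     g = p;                      ESCAPE_STORED_IN_GLOBAL (any non-SSA
                                 store is conservatively classified so)
     i = (long) p;               ESCAPE_BAD_CAST (pointer hidden in an
                                 integer)
     return p;                   ESCAPE_TO_RETURN  */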
2020
2021 enum escape_type
2022 is_escape_site (tree stmt, struct alias_info *ai)
2023 {
2024 tree call = get_call_expr_in (stmt);
2025 if (call != NULL_TREE)
2026 {
2027 ai->num_calls_found++;
2028
2029 if (!TREE_SIDE_EFFECTS (call))
2030 {
2031 ai->num_pure_const_calls_found++;
2032 return ESCAPE_TO_PURE_CONST;
2033 }
2034
2035 return ESCAPE_TO_CALL;
2036 }
2037 else if (TREE_CODE (stmt) == ASM_EXPR)
2038 return ESCAPE_TO_ASM;
2039 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2040 {
2041 tree lhs = TREE_OPERAND (stmt, 0);
2042
2043 /* Get to the base of _REF nodes. */
2044 if (TREE_CODE (lhs) != SSA_NAME)
2045 lhs = get_base_address (lhs);
2046
2047 /* If we couldn't recognize the LHS of the assignment, assume that it
2048 is a non-local store. */
2049 if (lhs == NULL_TREE)
2050 return ESCAPE_UNKNOWN;
2051
2052 /* If the RHS is a conversion between a pointer and an integer, the
2053 pointer escapes since we can't track the integer. */
2054 if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
2055 || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
2056 || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2057 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
2058 (TREE_OPERAND (stmt, 1), 0)))
2059 && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
2060 return ESCAPE_BAD_CAST;
2061
2062 /* If the LHS is an SSA name, it can't possibly represent a non-local
2063 memory store. */
2064 if (TREE_CODE (lhs) == SSA_NAME)
2065 return NO_ESCAPE;
2066
2067 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2068 local variable, we cannot be sure whether it will escape, because we
2069 don't have information about objects not in SSA form. Need to
2070 implement something along the lines of
2071
2072 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2073 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2074 Conference on Object-Oriented Programming Systems, Languages, and
2075 Applications (OOPSLA), pp. 1-19, 1999. */
2076 return ESCAPE_STORED_IN_GLOBAL;
2077 }
2078 else if (TREE_CODE (stmt) == RETURN_EXPR)
2079 return ESCAPE_TO_RETURN;
2080
2081 return NO_ESCAPE;
2082 }
2083
2084 /* Create a new memory tag of type TYPE.
2085 Does NOT push it into the current binding. */
2086
2087 static tree
2088 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2089 {
2090 tree tmp_var;
2091 tree new_type;
2092
2093 /* Make the type of the variable writable. */
2094 new_type = build_type_variant (type, 0, 0);
2095 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
2096
2097 tmp_var = build_decl (code, create_tmp_var_name (prefix),
2098 new_type);
2099 /* Make the variable writable. */
2100 TREE_READONLY (tmp_var) = 0;
2101
2102 /* It doesn't start out global. */
2103 MTAG_GLOBAL (tmp_var) = 0;
2104 TREE_STATIC (tmp_var) = 0;
2105 TREE_USED (tmp_var) = 1;
2106
2107 return tmp_var;
2108 }
2109
2110 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2111 is considered to represent all the pointers whose pointed-to types are
2112 in the same alias set class. Otherwise, the tag represents a single
2113 SSA_NAME pointer variable. */
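
/* Illustrative sketch: for two pointers that reach memory with the same
   alias set, e.g.

     int *p, *q;
     ... = *p;
     ... = *q;

   a single symbol memory tag (SMT) of type 'int' can represent the
   memory reachable from both P and Q, whereas a name memory tag (NMT)
   is created for one specific SSA_NAME (say p_1) once points-to
   analysis knows what that name points to.  */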
2114
2115 static tree
2116 create_memory_tag (tree type, bool is_type_tag)
2117 {
2118 var_ann_t ann;
2119 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2120 type, (is_type_tag) ? "SMT" : "NMT");
2121
2122 /* By default, memory tags are local variables. Alias analysis will
2123 determine whether they should be considered globals. */
2124 DECL_CONTEXT (tag) = current_function_decl;
2125
2126 /* Memory tags are by definition addressable. */
2127 TREE_ADDRESSABLE (tag) = 1;
2128
2129 ann = get_var_ann (tag);
2130 ann->symbol_mem_tag = NULL_TREE;
2131
2132 /* Add the tag to the symbol table. */
2133 add_referenced_tmp_var (tag);
2134
2135 return tag;
2136 }
2137
2138
2139 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2140 This is used if P_i has been found to point to a specific set of
2141 variables or to a non-aliased memory location like the address returned
2142 by malloc functions. */
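
/* For instance (illustrative only), after

     p_1 = malloc (n);
     *p_1 = 0;

   points-to analysis knows p_1 points to a fresh, non-aliased heap
   object, so the dereference is represented with a name memory tag for
   p_1 instead of the symbol tag shared by every pointer with the same
   pointed-to alias set.  */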
2143
2144 static tree
2145 get_nmt_for (tree ptr)
2146 {
2147 struct ptr_info_def *pi = get_ptr_info (ptr);
2148 tree tag = pi->name_mem_tag;
2149
2150 if (tag == NULL_TREE)
2151 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2152 return tag;
2153 }
2154
2155
2156 /* Return the symbol memory tag associated with pointer PTR. A memory
2157 tag is an artificial variable that represents the memory location
2158 pointed-to by PTR. It is used to model the effects of pointer
2159 de-references on addressable variables.
2160
2161 AI points to the data gathered during alias analysis. This
2162 function populates the array AI->POINTERS. */
2163
2164 static tree
2165 get_tmt_for (tree ptr, struct alias_info *ai)
2166 {
2167 size_t i;
2168 tree tag;
2169 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2170 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2171
2172 /* To avoid creating unnecessary memory tags, only create one memory tag
2173 per alias set class. Note that it may be tempting to group
2174 memory tags based on conflicting alias sets instead of
2175 equivalence. That would be wrong because alias sets are not
2176 necessarily transitive (as demonstrated by the libstdc++ test
2177 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2178 such that conflicts (A, B) == true and conflicts (A, C) == true,
2179 it does not necessarily follow that conflicts (B, C) == true. */
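
/* A concrete, illustrative instance of the non-transitivity above: the
   alias set of 'char' conflicts with that of 'int' and with that of
   'double' (character types may alias anything), yet the alias sets of
   'int' and 'double' do not conflict with each other.  */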
2180 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2181 {
2182 struct alias_map_d *curr = ai->pointers[i];
2183 tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
2184 if (tag_set == curr->set)
2185 {
2186 tag = curr_tag;
2187 break;
2188 }
2189 }
2190
2191 /* If PTR's pointed-to memory does not share an alias set with any
2192 existing memory tag, create a new tag for PTR and add it to the POINTERS array. */
2193 if (tag == NULL_TREE)
2194 {
2195 struct alias_map_d *alias_map;
2196
2197 /* If PTR did not have a symbol tag already, create a new SMT, an
2198 artificial variable representing the memory location
2199 pointed-to by PTR. */
2200 if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
2201 tag = create_memory_tag (tag_type, true);
2202 else
2203 tag = var_ann (ptr)->symbol_mem_tag;
2204
2205 /* Add PTR to the POINTERS array. Note that we are not interested in
2206 PTR's alias set. Instead, we cache the alias set for the memory that
2207 PTR points to. */
2208 alias_map = XCNEW (struct alias_map_d);
2209 alias_map->var = ptr;
2210 alias_map->set = tag_set;
2211 ai->pointers[ai->num_pointers++] = alias_map;
2212 }
2213
2214 /* If the pointed-to type is volatile, so is the tag. */
2215 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2216
2217 /* Make sure that the symbol tag has the same alias set as the
2218 pointed-to type. */
2219 gcc_assert (tag_set == get_alias_set (tag));
2220
2221 return tag;
2222 }
2223
2224
2225 /* Create GLOBAL_VAR, an artificial global variable to act as a
2226 representative of all the variables that may be clobbered by function
2227 calls. */
2228
2229 static void
2230 create_global_var (void)
2231 {
2232 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2233 void_type_node);
2234 DECL_ARTIFICIAL (global_var) = 1;
2235 TREE_READONLY (global_var) = 0;
2236 DECL_EXTERNAL (global_var) = 1;
2237 TREE_STATIC (global_var) = 1;
2238 TREE_USED (global_var) = 1;
2239 DECL_CONTEXT (global_var) = NULL_TREE;
2240 TREE_THIS_VOLATILE (global_var) = 0;
2241 TREE_ADDRESSABLE (global_var) = 0;
2242
2243 create_var_ann (global_var);
2244 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2245 add_referenced_tmp_var (global_var);
2246 mark_sym_for_renaming (global_var);
2247 }
2248
2249
2250 /* Dump alias statistics on FILE. */
2251
2252 static void
2253 dump_alias_stats (FILE *file)
2254 {
2255 const char *funcname
2256 = lang_hooks.decl_printable_name (current_function_decl, 2);
2257 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2258 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2259 fprintf (file, "Total alias mayalias results:\t%u\n",
2260 alias_stats.alias_mayalias);
2261 fprintf (file, "Total alias noalias results:\t%u\n",
2262 alias_stats.alias_noalias);
2263 fprintf (file, "Total simple queries:\t%u\n",
2264 alias_stats.simple_queries);
2265 fprintf (file, "Total simple resolved:\t%u\n",
2266 alias_stats.simple_resolved);
2267 fprintf (file, "Total TBAA queries:\t%u\n",
2268 alias_stats.tbaa_queries);
2269 fprintf (file, "Total TBAA resolved:\t%u\n",
2270 alias_stats.tbaa_resolved);
2271 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2272 alias_stats.structnoaddress_queries);
2273 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2274 alias_stats.structnoaddress_resolved);
2275 }
2276
2277
2278 /* Dump alias information on FILE. */
2279
2280 void
2281 dump_alias_info (FILE *file)
2282 {
2283 size_t i;
2284 const char *funcname
2285 = lang_hooks.decl_printable_name (current_function_decl, 2);
2286 referenced_var_iterator rvi;
2287 tree var;
2288
2289 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2290
2291 fprintf (file, "Aliased symbols\n\n");
2292
2293 FOR_EACH_REFERENCED_VAR (var, rvi)
2294 {
2295 if (may_be_aliased (var))
2296 dump_variable (file, var);
2297 }
2298
2299 fprintf (file, "\nDereferenced pointers\n\n");
2300
2301 FOR_EACH_REFERENCED_VAR (var, rvi)
2302 {
2303 var_ann_t ann = var_ann (var);
2304 if (ann->symbol_mem_tag)
2305 dump_variable (file, var);
2306 }
2307
2308 fprintf (file, "\nSymbol memory tags\n\n");
2309
2310 FOR_EACH_REFERENCED_VAR (var, rvi)
2311 {
2312 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2313 dump_variable (file, var);
2314 }
2315
2316 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2317
2318 fprintf (file, "SSA_NAME pointers\n\n");
2319 for (i = 1; i < num_ssa_names; i++)
2320 {
2321 tree ptr = ssa_name (i);
2322 struct ptr_info_def *pi;
2323
2324 if (ptr == NULL_TREE)
2325 continue;
2326
2327 pi = SSA_NAME_PTR_INFO (ptr);
2328 if (!SSA_NAME_IN_FREE_LIST (ptr)
2329 && pi
2330 && pi->name_mem_tag)
2331 dump_points_to_info_for (file, ptr);
2332 }
2333
2334 fprintf (file, "\nName memory tags\n\n");
2335
2336 FOR_EACH_REFERENCED_VAR (var, rvi)
2337 {
2338 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2339 dump_variable (file, var);
2340 }
2341
2342 fprintf (file, "\n");
2343 }
2344
2345
2346 /* Dump alias information on stderr. */
2347
2348 void
2349 debug_alias_info (void)
2350 {
2351 dump_alias_info (stderr);
2352 }
2353
2354
2355 /* Return the alias information associated with pointer T. It creates a
2356 new instance if none existed. */
2357
2358 struct ptr_info_def *
2359 get_ptr_info (tree t)
2360 {
2361 struct ptr_info_def *pi;
2362
2363 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2364
2365 pi = SSA_NAME_PTR_INFO (t);
2366 if (pi == NULL)
2367 {
2368 pi = GGC_NEW (struct ptr_info_def);
2369 memset ((void *)pi, 0, sizeof (*pi));
2370 SSA_NAME_PTR_INFO (t) = pi;
2371 }
2372
2373 return pi;
2374 }
2375
2376
2377 /* Dump points-to information for SSA_NAME PTR into FILE. */
2378
2379 void
2380 dump_points_to_info_for (FILE *file, tree ptr)
2381 {
2382 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2383
2384 print_generic_expr (file, ptr, dump_flags);
2385
2386 if (pi)
2387 {
2388 if (pi->name_mem_tag)
2389 {
2390 fprintf (file, ", name memory tag: ");
2391 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2392 }
2393
2394 if (pi->is_dereferenced)
2395 fprintf (file, ", is dereferenced");
2396
2397 if (pi->value_escapes_p)
2398 fprintf (file, ", its value escapes");
2399
2400 if (pi->pt_anything)
2401 fprintf (file, ", points-to anything");
2402
2403 if (pi->pt_null)
2404 fprintf (file, ", points-to NULL");
2405
2406 if (pi->pt_vars)
2407 {
2408 unsigned ix;
2409 bitmap_iterator bi;
2410
2411 fprintf (file, ", points-to vars: { ");
2412 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2413 {
2414 print_generic_expr (file, referenced_var (ix), dump_flags);
2415 fprintf (file, " ");
2416 }
2417 fprintf (file, "}");
2418 }
2419 }
2420
2421 fprintf (file, "\n");
2422 }
2423
2424
2425 /* Dump points-to information for VAR into stderr. */
2426
2427 void
2428 debug_points_to_info_for (tree var)
2429 {
2430 dump_points_to_info_for (stderr, var);
2431 }
2432
2433
2434 /* Dump points-to information into FILE. NOTE: This function is slow, as
2435 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2436
2437 void
2438 dump_points_to_info (FILE *file)
2439 {
2440 basic_block bb;
2441 block_stmt_iterator si;
2442 ssa_op_iter iter;
2443 const char *fname =
2444 lang_hooks.decl_printable_name (current_function_decl, 2);
2445 referenced_var_iterator rvi;
2446 tree var;
2447
2448 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2449
2450 /* First dump points-to information for the default definitions of
2451 pointer variables. This is necessary because default definitions are
2452 not part of the code. */
2453 FOR_EACH_REFERENCED_VAR (var, rvi)
2454 {
2455 if (POINTER_TYPE_P (TREE_TYPE (var)))
2456 {
2457 tree def = default_def (var);
2458 if (def)
2459 dump_points_to_info_for (file, def);
2460 }
2461 }
2462
2463 /* Dump points-to information for every pointer defined in the program. */
2464 FOR_EACH_BB (bb)
2465 {
2466 tree phi;
2467
2468 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2469 {
2470 tree ptr = PHI_RESULT (phi);
2471 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2472 dump_points_to_info_for (file, ptr);
2473 }
2474
2475 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2476 {
2477 tree stmt = bsi_stmt (si);
2478 tree def;
2479 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2480 if (POINTER_TYPE_P (TREE_TYPE (def)))
2481 dump_points_to_info_for (file, def);
2482 }
2483 }
2484
2485 fprintf (file, "\n");
2486 }
2487
2488
2489 /* Dump points-to information into stderr. */
2490
2491 void
2492 debug_points_to_info (void)
2493 {
2494 dump_points_to_info (stderr);
2495 }
2496
2497 /* Dump to FILE the list of variables that may be aliasing VAR. */
2498
2499 void
2500 dump_may_aliases_for (FILE *file, tree var)
2501 {
2502 VEC(tree, gc) *aliases;
2503
2504 if (TREE_CODE (var) == SSA_NAME)
2505 var = SSA_NAME_VAR (var);
2506
2507 aliases = var_ann (var)->may_aliases;
2508 if (aliases)
2509 {
2510 size_t i;
2511 tree al;
2512 fprintf (file, "{ ");
2513 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2514 {
2515 print_generic_expr (file, al, dump_flags);
2516 fprintf (file, " ");
2517 }
2518 fprintf (file, "}");
2519 }
2520 }
2521
2522
2523 /* Dump to stderr the list of variables that may be aliasing VAR. */
2524
2525 void
2526 debug_may_aliases_for (tree var)
2527 {
2528 dump_may_aliases_for (stderr, var);
2529 }
2530
2531 /* Return true if VAR may be aliased. */
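
/* Illustrative cases (hypothetical declarations):

     int g;                 may be aliased: TREE_PUBLIC, so another
                            translation unit can take its address.
     void f (void)
     {
       int i;               never addressable, so not aliased.
       int j, *p = &j;      the '&j' marks J as TREE_ADDRESSABLE.
     }  */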
2532
2533 bool
2534 may_be_aliased (tree var)
2535 {
2536 /* Obviously aliased if the variable's address has been taken. */
2537 if (TREE_ADDRESSABLE (var))
2538 return true;
2539
2540 /* Globally visible variables can have their addresses taken by other
2541 translation units. */
2542
2543 if (MTAG_P (var)
2544 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2545 return true;
2546 else if (!MTAG_P (var)
2547 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2548 return true;
2549
2550 /* Automatic variables can't have their addresses escape any other way.
2551 This must be after the check for global variables, as extern declarations
2552 do not have TREE_STATIC set. */
2553 if (!TREE_STATIC (var))
2554 return false;
2555
2556 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2557 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2558 we can only be sure the variable isn't addressable if it's local to the
2559 current function. */
2560 if (flag_unit_at_a_time)
2561 return false;
2562 if (decl_function_context (var) == current_function_decl)
2563 return false;
2564
2565 return true;
2566 }
2567
2568
2569 /* Given two symbols, return TRUE if one is in the alias set of the other. */
2570 bool
2571 is_aliased_with (tree tag, tree sym)
2572 {
2573 size_t i;
2574 VEC(tree,gc) *aliases;
2575 tree al;
2576
2577 if (var_ann (sym)->is_aliased)
2578 {
2579 aliases = var_ann (tag)->may_aliases;
2580
2581 if (aliases == NULL)
2582 return false;
2583
2584 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2585 if (al == sym)
2586 return true;
2587 }
2588 else
2589 {
2590 aliases = var_ann (sym)->may_aliases;
2591
2592 if (aliases == NULL)
2593 return false;
2594
2595 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2596 if (al == tag)
2597 return true;
2598 }
2599
2600 return false;
2601 }
2602
2603
2604 /* Add VAR to the list of may-aliases of PTR's symbol tag. If PTR
2605 doesn't already have a symbol tag, create one. */
2606
2607 void
2608 add_type_alias (tree ptr, tree var)
2609 {
2610 VEC(tree, gc) *aliases;
2611 tree tag, al;
2612 var_ann_t ann = var_ann (ptr);
2613 subvar_t svars;
2614 VEC (tree, heap) *varvec = NULL;
2615 unsigned i;
2616
2617 if (ann->symbol_mem_tag == NULL_TREE)
2618 {
2619 tree q = NULL_TREE;
2620 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2621 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2622 safe_referenced_var_iterator rvi;
2623
2624 /* PTR doesn't have a symbol tag, create a new one and add VAR to
2625 the new tag's alias set.
2626
2627 FIXME: This is slower than necessary. We need to determine
2628 whether there is another pointer Q with the same alias set as
2629 PTR. This could be sped up by having symbol tags associated
2630 with types. */
2631 FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
2632 {
2633 if (POINTER_TYPE_P (TREE_TYPE (q))
2634 && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
2635 {
2636 /* Found another pointer Q with the same alias set as
2637 the PTR's pointed-to type. If Q has a symbol tag, use
2638 it. Otherwise, create a new memory tag for PTR. */
2639 var_ann_t ann1 = var_ann (q);
2640 if (ann1->symbol_mem_tag)
2641 ann->symbol_mem_tag = ann1->symbol_mem_tag;
2642 else
2643 ann->symbol_mem_tag = create_memory_tag (tag_type, true);
2644 goto found_tag;
2645 }
2646 }
2647
2648 /* Couldn't find any other pointer with a symbol tag we could use.
2649 Create a new memory tag for PTR. */
2650 ann->symbol_mem_tag = create_memory_tag (tag_type, true);
2651 }
2652
2653 found_tag:
2654 /* If VAR is not already PTR's symbol tag, add it to the may-alias set
2655 for PTR's symbol tag. */
2656 gcc_assert (!MTAG_P (var));
2657 tag = ann->symbol_mem_tag;
2658
2659 /* If VAR has subvars, add the subvars to the tag instead of the
2660 actual var. */
2661 if (var_can_have_subvars (var)
2662 && (svars = get_subvars_for_var (var)))
2663 {
2664 subvar_t sv;
2665 for (sv = svars; sv; sv = sv->next)
2666 add_may_alias (tag, sv->var);
2667 }
2668 else
2669 add_may_alias (tag, var);
2670
2671 /* TAG and its set of aliases need to be marked for renaming. */
2672 mark_sym_for_renaming (tag);
2673 if ((aliases = var_ann (tag)->may_aliases) != NULL)
2674 {
2675 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2676 mark_sym_for_renaming (al);
2677 }
2678
2679 /* If we had grouped aliases, VAR may have aliases of its own. Mark
2680 them for renaming as well. Other statements referencing the
2681 aliases of VAR will need to be updated. */
2682 if ((aliases = var_ann (var)->may_aliases) != NULL)
2683 {
2684 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2685 mark_sym_for_renaming (al);
2686 }
2687 VEC_free (tree, heap, varvec);
2688 }
2689
2690
2691 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
2692 tag so that it has the aliasing of VAR.
2693
2694 Note that the set of aliases represented by the new symbol tag is not marked
2695 for renaming. */
2696
2697 void
2698 new_type_alias (tree ptr, tree var)
2699 {
2700 var_ann_t p_ann = var_ann (ptr);
2701 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2702 var_ann_t v_ann = var_ann (var);
2703 tree tag;
2704 subvar_t svars;
2705
2706 gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2707 gcc_assert (!MTAG_P (var));
2708
2709 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2710 subvars, add the subvars to the tag instead of the actual var. */
2711 if (var_can_have_subvars (var)
2712 && (svars = get_subvars_for_var (var)))
2713 {
2714 subvar_t sv;
2715
2716 tag = create_memory_tag (tag_type, true);
2717 p_ann->symbol_mem_tag = tag;
2718
2719 for (sv = svars; sv; sv = sv->next)
2720 add_may_alias (tag, sv->var);
2721 }
2722 else
2723 {
2724 /* The following is based on code in add_stmt_operand to ensure that the
2725 same defs/uses/vdefs/vuses will be found after replacing a reference
2726 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2727 is the address of var. */
2728 VEC(tree, gc) *aliases = v_ann->may_aliases;
2729
2730 if ((aliases != NULL)
2731 && (VEC_length (tree, aliases) == 1))
2732 {
2733 tree ali = VEC_index (tree, aliases, 0);
2734
2735 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2736 {
2737 p_ann->symbol_mem_tag = ali;
2738 return;
2739 }
2740 }
2741
2742 tag = create_memory_tag (tag_type, true);
2743 p_ann->symbol_mem_tag = tag;
2744
2745 if (aliases == NULL)
2746 add_may_alias (tag, var);
2747 else
2748 {
2749 unsigned i;
2750 tree al;
2751
2752 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2753 add_may_alias (tag, al);
2754 }
2755 }
2756
2757 TREE_READONLY (tag) = TREE_READONLY (var);
2758 MTAG_GLOBAL (tag) = is_global_var (var);
2759 }
2760
2761
2762
2763 /* This represents the used range of a variable. */
2764
2765 typedef struct used_part
2766 {
2767 HOST_WIDE_INT minused;
2768 HOST_WIDE_INT maxused;
2769 /* True if we have an explicit use/def of some portion of this variable,
2770 even if it is all of it, e.g. a.b = 5 or temp = a.b. */
2771 bool explicit_uses;
2772 /* True if we have an implicit use/def of some portion of this
2773 variable. Implicit uses occur when we can't tell what part we
2774 are referencing, and have to make conservative assumptions. */
2775 bool implicit_uses;
2776 /* True if the structure is only written to or has its address taken. */
2777 bool write_only;
2778 } *used_part_t;
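
/* Illustrative sketch (assuming 32-bit 'int' and no padding; the
   numbers are hypothetical): for

     struct { int a; int b; int c; } x;
     x.b = 1;            explicit def of bits [32, 64)
     tmp = x.c;          explicit use of bits [64, 96)

   the used_part recorded for X would have minused == 32, maxused == 96,
   explicit_uses set, implicit_uses clear, and write_only cleared by the
   read of x.c.  */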
2779
2780 /* A hash table of used_part structures, indexed by variable uid. */
2781
2782 static htab_t used_portions;
2783
2784 struct used_part_map
2785 {
2786 unsigned int uid;
2787 used_part_t to;
2788 };
2789
2790 /* Return true if the uids in the two used part maps are equal. */
2791
2792 static int
2793 used_part_map_eq (const void *va, const void *vb)
2794 {
2795 const struct used_part_map *a = (const struct used_part_map *) va;
2796 const struct used_part_map *b = (const struct used_part_map *) vb;
2797 return (a->uid == b->uid);
2798 }
2799
2800 /* Hash a used_part_map structure by its uid. */
2801
2802 static unsigned int
2803 used_part_map_hash (const void *item)
2804 {
2805 return ((const struct used_part_map *)item)->uid;
2806 }
2807
2808 /* Free a used part map element. */
2809
2810 static void
2811 free_used_part_map (void *item)
2812 {
2813 free (((struct used_part_map *)item)->to);
2814 free (item);
2815 }
2816
2817 /* Lookup a used_part structure for a UID. */
2818
2819 static used_part_t
2820 up_lookup (unsigned int uid)
2821 {
2822 struct used_part_map *h, in;
2823 in.uid = uid;
2824 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2825 if (!h)
2826 return NULL;
2827 return h->to;
2828 }
2829
2830 /* Insert the pair UID, TO into the used part hashtable. */
2831
2832 static void
2833 up_insert (unsigned int uid, used_part_t to)
2834 {
2835 struct used_part_map *h;
2836 void **loc;
2837
2838 h = XNEW (struct used_part_map);
2839 h->uid = uid;
2840 h->to = to;
2841 loc = htab_find_slot_with_hash (used_portions, h,
2842 uid, INSERT);
2843 if (*loc != NULL)
2844 free (*loc);
2845 *(struct used_part_map **) loc = h;
2846 }
2847
2848
2849 /* Given a variable uid, UID, get or create the entry in the used portions
2850 table for the variable. */
2851
2852 static used_part_t
2853 get_or_create_used_part_for (size_t uid)
2854 {
2855 used_part_t up;
2856 if ((up = up_lookup (uid)) == NULL)
2857 {
2858 up = XCNEW (struct used_part);
2859 up->minused = INT_MAX;
2860 up->maxused = 0;
2861 up->explicit_uses = false;
2862 up->implicit_uses = false;
2863 up->write_only = true;
2864 }
2865
2866 return up;
2867 }
2868
2869
2870 /* Create and return a structure sub-variable for field type FIELD at
2871 offset OFFSET, with size SIZE, of variable VAR. */
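
/* Illustrative sketch (offsets and sizes assume 32-bit 'int' and are
   hypothetical, as are the generated names): for

     struct pair { int first; int second; } p;

   create_overlap_variables_for ends up calling this twice, yielding
   subvariables along the lines of

     SFT.1: parent P, offset  0, size 32   (p.first)
     SFT.2: parent P, offset 32, size 32   (p.second)  */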
2872
2873 static tree
2874 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2875 unsigned HOST_WIDE_INT size)
2876 {
2877 var_ann_t ann;
2878 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2879
2880 /* We need to copy the various flags from VAR to SUBVAR, so that
2881 SUBVAR is considered a global variable iff VAR was. */
2882 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2883 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2884 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2885 TREE_STATIC (subvar) = TREE_STATIC (var);
2886 TREE_READONLY (subvar) = TREE_READONLY (var);
2887 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
2888
2889 /* Add the new variable to REFERENCED_VARS. */
2890 ann = get_var_ann (subvar);
2891 ann->symbol_mem_tag = NULL;
2892 add_referenced_tmp_var (subvar);
2893 SFT_PARENT_VAR (subvar) = var;
2894 SFT_OFFSET (subvar) = offset;
2895 SFT_SIZE (subvar) = size;
2896 return subvar;
2897 }
2898
2899
2900 /* Given an aggregate VAR, create the subvariables that represent its
2901 fields. */
2902
2903 static void
2904 create_overlap_variables_for (tree var)
2905 {
2906 VEC(fieldoff_s,heap) *fieldstack = NULL;
2907 used_part_t up;
2908 size_t uid = DECL_UID (var);
2909
2910 up = up_lookup (uid);
2911 if (!up
2912 || up->write_only)
2913 return;
2914
2915 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2916 if (VEC_length (fieldoff_s, fieldstack) != 0)
2917 {
2918 subvar_t *subvars;
2919 fieldoff_s *fo;
2920 bool notokay = false;
2921 int fieldcount = 0;
2922 int i;
2923 HOST_WIDE_INT lastfooffset = -1;
2924 HOST_WIDE_INT lastfosize = -1;
2925 tree lastfotype = NULL_TREE;
2926
2927 /* Not all fields have DECL_SIZE set; for those that don't, we don't
2928 know their size and thus cannot handle them.
2929 The same is true of fields whose DECL_SIZE is not an integer
2930 constant (such as variable-sized fields).
2931 Fields with non-constant offsets will have an offset < 0.
2932 We *could* handle fields that are constant-sized arrays, but
2933 currently don't. Doing so would require some extra changes to
2934 tree-ssa-operands.c. */
2935
2936 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2937 {
2938 if (!fo->size
2939 || TREE_CODE (fo->size) != INTEGER_CST
2940 || fo->offset < 0)
2941 {
2942 notokay = true;
2943 break;
2944 }
2945 fieldcount++;
2946 }
2947
2948 /* The current heuristic we use is as follows:
2949 If the variable has no used portions in this function, no
2950 structure vars are created for it.
2951 Otherwise,
2952 If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
2953 we always create structure vars for it.
2954 If the variable has SALIAS_MAX_IMPLICIT_FIELDS fields or more, and
2955 some explicit uses, we create structure vars for it.
2956 If the variable has SALIAS_MAX_IMPLICIT_FIELDS fields or more, and
2957 no explicit uses, we do not create structure vars for it.
2958 */
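
/* For instance (hypothetical): an aggregate with at least
   SALIAS_MAX_IMPLICIT_FIELDS fields that is only ever copied wholesale
   is left as a single variable, while the same aggregate with even one
   explicit 's.field' access gets subvariables for its used fields.  */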
2959
2960 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2961 && !up->explicit_uses)
2962 {
2963 if (dump_file && (dump_flags & TDF_DETAILS))
2964 {
2965 fprintf (dump_file, "Variable ");
2966 print_generic_expr (dump_file, var, 0);
2967 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2968 }
2969 notokay = true;
2970 }
2971
2972 /* Bail out, if we can't create overlap variables. */
2973 if (notokay)
2974 {
2975 VEC_free (fieldoff_s, heap, fieldstack);
2976 return;
2977 }
2978
2979 /* Otherwise, create the variables. */
2980 subvars = lookup_subvars_for_var (var);
2981
2982 sort_fieldstack (fieldstack);
2983
2984 for (i = VEC_length (fieldoff_s, fieldstack);
2985 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2986 {
2987 subvar_t sv;
2988 HOST_WIDE_INT fosize;
2989 tree currfotype;
2990
2991 fosize = TREE_INT_CST_LOW (fo->size);
2992 currfotype = fo->type;
2993
2994 /* If this field isn't in the used portion,
2995 or it has the exact same offset and size as the last
2996 field, skip it. */
2997
2998 if (((fo->offset <= up->minused
2999 && fo->offset + fosize <= up->minused)
3000 || fo->offset >= up->maxused)
3001 || (fo->offset == lastfooffset
3002 && fosize == lastfosize
3003 && currfotype == lastfotype))
3004 continue;
3005 sv = GGC_NEW (struct subvar);
3006 sv->next = *subvars;
3007 sv->var = create_sft (var, fo->type, fo->offset, fosize);
3008
3009 if (dump_file)
3010 {
3011 fprintf (dump_file, "structure field tag %s created for var %s",
3012 get_name (sv->var), get_name (var));
3013 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3014 SFT_OFFSET (sv->var));
3015 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3016 SFT_SIZE (sv->var));
3017 fprintf (dump_file, "\n");
3018 }
3019
3020 lastfotype = currfotype;
3021 lastfooffset = fo->offset;
3022 lastfosize = fosize;
3023 *subvars = sv;
3024 }
3025
3026 /* Once we have created subvars, the original is no longer call
3027 clobbered on its own. Its call clobbered status depends
3028 completely on the call clobbered status of the subvars.
3029
3030 add_referenced_tmp_var, called via create_sft in the loop above, takes care of
3031 marking subvars of global variables as call clobbered for us
3032 to start, since they are global as well. */
3033 clear_call_clobbered (var);
3034 }
3035
3036 VEC_free (fieldoff_s, heap, fieldstack);
3037 }
3038
3039
3040 /* Find the conservative answer to the question of what portions of what
3041 structures are used by this statement. We assume that if we have a
3042 component ref with a known size and offset, we only need that part
3043 of the structure. For unknown cases, or cases where we do something
3044 to the whole structure, we assume we need to create fields for the
3045 entire structure. */
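
/* Illustrative sketch (hypothetical code): 'tmp = x.a' is recorded as
   an explicit use of just A's bits, whereas a whole-aggregate copy such
   as 'y = x', or a reference whose extent cannot be bounded, forces the
   conservative whole-variable range with only implicit uses.  */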
3046
3047 static tree
3048 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3049 {
3050 switch (TREE_CODE (*tp))
3051 {
3052 case MODIFY_EXPR:
3053 /* Recurse manually here to track whether the use is in the
3054 LHS of an assignment. */
3055 find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
3056 return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
3057 case REALPART_EXPR:
3058 case IMAGPART_EXPR:
3059 case COMPONENT_REF:
3060 case ARRAY_REF:
3061 {
3062 HOST_WIDE_INT bitsize;
3063 HOST_WIDE_INT bitmaxsize;
3064 HOST_WIDE_INT bitpos;
3065 tree ref;
3066 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3067 if (DECL_P (ref)
3068 && var_can_have_subvars (ref)
3069 && bitmaxsize != -1)
3070 {
3071 size_t uid = DECL_UID (ref);
3072 used_part_t up;
3073
3074 up = get_or_create_used_part_for (uid);
3075
3076 if (bitpos <= up->minused)
3077 up->minused = bitpos;
3078 if ((bitpos + bitmaxsize >= up->maxused))
3079 up->maxused = bitpos + bitmaxsize;
3080
3081 if (bitsize == bitmaxsize)
3082 up->explicit_uses = true;
3083 else
3084 up->implicit_uses = true;
3085 if (!lhs_p)
3086 up->write_only = false;
3087 up_insert (uid, up);
3088
3089 *walk_subtrees = 0;
3090 return NULL_TREE;
3091 }
3092 }
3093 break;
3094 /* This is here to make sure we mark the entire base variable as used
3095 when you take its address. Because our used portion analysis is
3096 simple, we aren't looking at casts or pointer arithmetic to see what
3097 happens when you take the address. */
3098 case ADDR_EXPR:
3099 {
3100 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3101
3102 if (var
3103 && DECL_P (var)
3104 && DECL_SIZE (var)
3105 && var_can_have_subvars (var)
3106 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3107 {
3108 used_part_t up;
3109 size_t uid = DECL_UID (var);
3110
3111 up = get_or_create_used_part_for (uid);
3112
3113 up->minused = 0;
3114 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3115 up->implicit_uses = true;
3116 if (!lhs_p)
3117 up->write_only = false;
3118
3119 up_insert (uid, up);
3120 *walk_subtrees = 0;
3121 return NULL_TREE;
3122 }
3123 }
3124 break;
3125 case CALL_EXPR:
3126 {
3127 tree *arg;
3128 for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
3129 {
3130 if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
3131 find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
3132 }
3133 *walk_subtrees = 0;
3134 return NULL_TREE;
3135 }
3136 case VAR_DECL:
3137 case PARM_DECL:
3138 case RESULT_DECL:
3139 {
3140 tree var = *tp;
3141 if (DECL_SIZE (var)
3142 && var_can_have_subvars (var)
3143 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3144 {
3145 used_part_t up;
3146 size_t uid = DECL_UID (var);
3147
3148 up = get_or_create_used_part_for (uid);
3149
3150 up->minused = 0;
3151 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3152 up->implicit_uses = true;
3153
3154 up_insert (uid, up);
3155 *walk_subtrees = 0;
3156 return NULL_TREE;
3157 }
3158 }
3159 break;
3160
3161 default:
3162 break;
3163
3164 }
3165 return NULL_TREE;
3166 }
3167
3168 /* Create structure field variables for structures used in this function. */
3169
3170 static unsigned int
3171 create_structure_vars (void)
3172 {
3173 basic_block bb;
3174 safe_referenced_var_iterator rvi;
3175 VEC (tree, heap) *varvec = NULL;
3176 tree var;
3177
3178 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3179 free_used_part_map);
3180
3181 FOR_EACH_BB (bb)
3182 {
3183 block_stmt_iterator bsi;
3184 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3185 {
3186 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3187 find_used_portions,
3188 NULL);
3189 }
3190 }
3191 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3192 {
3193 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3194 if (var
3195 && DECL_SIZE (var)
3196 && var_can_have_subvars (var)
3197 && !MTAG_P (var)
3198 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3199 create_overlap_variables_for (var);
3200 }
3201 htab_delete (used_portions);
3202 VEC_free (tree, heap, varvec);
3203 return 0;
3204 }
3205
3206 static bool
3207 gate_structure_vars (void)
3208 {
3209 return flag_tree_salias != 0;
3210 }
3211
3212 struct tree_opt_pass pass_create_structure_vars =
3213 {
3214 "salias", /* name */
3215 gate_structure_vars, /* gate */
3216 create_structure_vars, /* execute */
3217 NULL, /* sub */
3218 NULL, /* next */
3219 0, /* static_pass_number */
3220 0, /* tv_id */
3221 PROP_cfg, /* properties_required */
3222 0, /* properties_provided */
3223 0, /* properties_destroyed */
3224 0, /* todo_flags_start */
3225 TODO_dump_func, /* todo_flags_finish */
3226 0 /* letter */
3227 };
3228
3229 /* Reset the DECL_CALL_CLOBBERED flags on our referenced vars. In
3230 theory, this only needs to be done for globals. */
3231
3232 static unsigned int
3233 reset_cc_flags (void)
3234 {
3235 tree var;
3236 referenced_var_iterator rvi;
3237
3238 FOR_EACH_REFERENCED_VAR (var, rvi)
3239 DECL_CALL_CLOBBERED (var) = false;
3240 return 0;
3241 }
3242
3243 struct tree_opt_pass pass_reset_cc_flags =
3244 {
3245 NULL, /* name */
3246 NULL, /* gate */
3247 reset_cc_flags, /* execute */
3248 NULL, /* sub */
3249 NULL, /* next */
3250 0, /* static_pass_number */
3251 0, /* tv_id */
3252 PROP_referenced_vars |PROP_cfg, /* properties_required */
3253 0, /* properties_provided */
3254 0, /* properties_destroyed */
3255 0, /* todo_flags_start */
3256 0, /* todo_flags_finish */
3257 0 /* letter */
3258 };