]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-alias.c
* tree-dfa.c (add_referenced_var): Only global variables are
[thirdparty/gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-alias-common.h"
43 #include "tree-pass.h"
44 #include "convert.h"
45 #include "params.h"
46
47
48 /* Structure to map a variable to its alias set and keep track of the
49 virtual operands that will be needed to represent it. */
/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  One of these
   is built for every pointer and every addressable variable collected
   by setup_pointers_and_addressables.  */
struct alias_map_d
{
  /* Variable and its alias set (a HOST_WIDE_INT alias set number,
     presumably from the TBAA machinery -- see may_alias_p).  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  Used by the alias grouping heuristic.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  sbitmap may_aliases;
};
69
70
71 /* Alias information used by compute_may_aliases and its helpers. */
72 struct alias_info
73 {
74 /* SSA names visited while collecting points-to information. If bit I
75 is set, it means that SSA variable with version I has already been
76 visited. */
77 bitmap ssa_names_visited;
78
79 /* Array of SSA_NAME pointers processed by the points-to collector. */
80 varray_type processed_ptrs;
81
82 /* Variables whose address is still needed. */
83 bitmap addresses_needed;
84
85 /* ADDRESSABLE_VARS contains all the global variables and locals that
86 have had their address taken. */
87 struct alias_map_d **addressable_vars;
88 size_t num_addressable_vars;
89
90 /* POINTERS contains all the _DECL pointers with unique memory tags
91 that have been referenced in the program. */
92 struct alias_map_d **pointers;
93 size_t num_pointers;
94
95 /* Number of function calls found in the program. */
96 size_t num_calls_found;
97
98 /* Array of counters to keep track of how many times each pointer has
99 been dereferenced in the program. This is used by the alias grouping
100 heuristic in compute_flow_insensitive_aliasing. */
101 varray_type num_references;
102
103 /* Total number of virtual operands that will be needed to represent
104 all the aliases of all the pointers found in the program. */
105 long total_alias_vops;
106
107 /* Variables that have been written to. */
108 bitmap written_vars;
109
110 /* Pointers that have been used in an indirect store operation. */
111 bitmap dereferenced_ptrs_store;
112
113 /* Pointers that have been used in an indirect load operation. */
114 bitmap dereferenced_ptrs_load;
115 };
116
117
118 /* Counters used to display statistics on alias analysis. */
119 struct alias_stats_d
120 {
121 unsigned int alias_queries;
122 unsigned int alias_mayalias;
123 unsigned int alias_noalias;
124 unsigned int simple_queries;
125 unsigned int simple_resolved;
126 unsigned int tbaa_queries;
127 unsigned int tbaa_resolved;
128 unsigned int pta_queries;
129 unsigned int pta_resolved;
130 };
131
132
/* Local variables.  */
static struct alias_stats_d alias_stats;

/* Local functions.  Forward declarations for the helpers defined
   below; see each definition for details.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_points_to_and_addr_escape (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static bool collect_points_to_info_r (tree, tree, void *);
static bool is_escape_site (tree, size_t *);
static void add_pointed_to_var (struct alias_info *, tree, tree);
static void add_pointed_to_expr (tree, tree);
static void create_global_var (void);
static void collect_points_to_info_for (struct alias_info *, tree);
static bool ptr_is_dereferenced_by (tree, tree, bool *);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static struct ptr_info_def *get_ptr_info (tree t);
static void set_pt_anything (tree ptr);
static void set_pt_malloc (tree ptr);
162
/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.
   Created on demand by maybe_create_global_var.  */
tree global_var;
184
185
186 /* Compute may-alias information for every variable referenced in function
187 FNDECL.
188
189 Alias analysis proceeds in 3 main phases:
190
191 1- Points-to and escape analysis.
192
193 This phase walks the use-def chains in the SSA web looking for three
194 things:
195
196 * Assignments of the form P_i = &VAR
197 * Assignments of the form P_i = malloc()
198 * Pointers and ADDR_EXPR that escape the current function.
199
200 The concept of 'escaping' is the same one used in the Java world. When
201 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
202 outside of the current function. So, assignment to global variables,
203 function arguments and returning a pointer are all escape sites.
204
205 This is where we are currently limited. Since not everything is renamed
206 into SSA, we lose track of escape properties when a pointer is stashed
207 inside a field in a structure, for instance. In those cases, we are
208 assuming that the pointer does escape.
209
210 We use escape analysis to determine whether a variable is
211 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
212 is call-clobbered. If a pointer P_i escapes, then all the variables
213 pointed-to by P_i (and its memory tag) also escape.
214
215 2- Compute flow-sensitive aliases
216
217 We have two classes of memory tags. Memory tags associated with the
218 pointed-to data type of the pointers in the program. These tags are
219 called "type memory tag" (TMT). The other class are those associated
220 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
221 when adding operands for an INDIRECT_REF *P_i, we will first check
222 whether P_i has a name tag, if it does we use it, because that will have
223 more precise aliasing information. Otherwise, we use the standard type
224 tag.
225
226 In this phase, we go through all the pointers we found in points-to
227 analysis and create alias sets for the name memory tags associated with
228 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
229 it points to and its tag.
230
231
232 3- Compute flow-insensitive aliases
233
234 This pass will compare the alias set of every type memory tag and every
235 addressable variable found in the program. Given a type memory tag TMT
236 and an addressable variable V. If the alias sets of TMT and V conflict
237 (as computed by may_alias_p), then V is marked as an alias tag and added
238 to the alias set of TMT.
239
240 For instance, consider the following function:
241
242 foo (int i)
243 {
244 int *p, *q, a, b;
245
246 if (i > 10)
247 p = &a;
248 else
249 q = &b;
250
251 *p = 3;
252 *q = 5;
253 a = b + 2;
254 return *p;
255 }
256
257 After aliasing analysis has finished, the type memory tag for pointer
258 'p' will have two aliases, namely variables 'a' and 'b'. Every time
259 pointer 'p' is dereferenced, we want to mark the operation as a
260 potential reference to 'a' and 'b'.
261
262 foo (int i)
263 {
264 int *p, a, b;
265
266 if (i_2 > 10)
267 p_4 = &a;
268 else
269 p_6 = &b;
270 # p_1 = PHI <p_4(1), p_6(2)>;
271
272 # a_7 = V_MAY_DEF <a_3>;
273 # b_8 = V_MAY_DEF <b_5>;
274 *p_1 = 3;
275
276 # a_9 = V_MAY_DEF <a_7>
277 # VUSE <b_8>
278 a_9 = b_8 + 2;
279
280 # VUSE <a_9>;
281 # VUSE <b_8>;
282 return *p_1;
283 }
284
285 In certain cases, the list of may aliases for a pointer may grow too
286 large. This may cause an explosion in the number of virtual operands
287 inserted in the code. Resulting in increased memory consumption and
288 compilation time.
289
290 When the number of virtual operands needed to represent aliased
291 loads and stores grows too large (configurable with @option{--param
292 max-aliased-vops}), alias sets are grouped to avoid severe
293 compile-time slow downs and memory consumption. See group_aliases. */
294
/* Pass entry point.  Runs the three analysis phases described in the
   overview comment above, in an order that matters: flow-sensitive
   aliasing must run before flow-insensitive aliasing (see below).  */

static void
compute_may_aliases (void)
{
  struct alias_info *ai;

  /* Reset the statistics counters for this function.  */
  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if it is passed
     as a function call argument).  */
  compute_points_to_and_addr_escape (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered type tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);
}
347
/* Pass descriptor for the may-alias computation.  Requires CFG, SSA
   form and points-to information; provides PROP_alias and triggers a
   variable rename afterwards (TODO_rename_vars) so that new memory
   tags enter SSA form.  */
struct tree_opt_pass pass_may_alias = 
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_pta,	/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa  /* todo_flags_finish */
};
364
365
/* Initialize the data structures used for alias analysis.  Allocates a
   fresh struct alias_info and, on second and later invocations within
   the same compilation (tracked by the function-static
   ALIASES_COMPUTED_P), clears the alias results left behind by the
   previous run so they can be recomputed from scratch.  The returned
   structure must be released with delete_alias_info.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  /* True after the first time aliases have been computed in this
     process; persists across calls.  */
  static bool aliases_computed_p = false;

  ai = xcalloc (1, sizeof (struct alias_info));
  ai->ssa_names_visited = BITMAP_XMALLOC ();
  VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
  ai->addresses_needed = BITMAP_XMALLOC ();
  /* One reference counter per referenced variable; indexed by UID.  */
  VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
  ai->written_vars = BITMAP_XMALLOC ();
  ai->dereferenced_ptrs_store = BITMAP_XMALLOC ();
  ai->dereferenced_ptrs_load = BITMAP_XMALLOC ();

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      size_t i;

      /* Clear the call-clobbered set.  We are going to re-discover
	 call-clobbered variables.  */
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	{
	  tree var = referenced_var (i);

	  /* Variables that are intrinsically call-clobbered (globals,
	     local statics, etc) will not be marked by the aliasing
	     code, so we can't remove them from CALL_CLOBBERED_VARS.  */
	  if (!is_call_clobbered (var))
	    bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
	});

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.
	 NOTE(review): may_aliases is simply nulled, not freed --
	 presumably it is garbage collected; confirm.  */
      for (i = 0; i < num_referenced_vars; i++)
	{
	  var_ann_t ann = var_ann (referenced_var (i));
	  ann->is_alias_tag = 0;
	  ann->may_aliases = NULL;
	}

      /* Clear flow-sensitive points-to information from each SSA name.
	 SSA version numbers start at 1, hence the loop base.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_malloc = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}
446
447
448 /* Deallocate memory used by alias analysis. */
449
450 static void
451 delete_alias_info (struct alias_info *ai)
452 {
453 size_t i;
454
455 BITMAP_XFREE (ai->ssa_names_visited);
456 ai->processed_ptrs = NULL;
457 BITMAP_XFREE (ai->addresses_needed);
458
459 for (i = 0; i < ai->num_addressable_vars; i++)
460 {
461 sbitmap_free (ai->addressable_vars[i]->may_aliases);
462 free (ai->addressable_vars[i]);
463 }
464 free (ai->addressable_vars);
465
466 for (i = 0; i < ai->num_pointers; i++)
467 {
468 sbitmap_free (ai->pointers[i]->may_aliases);
469 free (ai->pointers[i]);
470 }
471 free (ai->pointers);
472
473 ai->num_references = NULL;
474 BITMAP_XFREE (ai->written_vars);
475 BITMAP_XFREE (ai->dereferenced_ptrs_store);
476 BITMAP_XFREE (ai->dereferenced_ptrs_load);
477
478 free (ai);
479 }
480
481
482 /* Walk use-def chains for pointer PTR to determine what variables is PTR
483 pointing to. */
484
485 static void
486 collect_points_to_info_for (struct alias_info *ai, tree ptr)
487 {
488 #if defined ENABLE_CHECKING
489 if (!POINTER_TYPE_P (TREE_TYPE (ptr)))
490 abort ();
491 #endif
492
493 if (!bitmap_bit_p (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
494 {
495 bitmap_set_bit (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
496 walk_use_def_chains (ptr, collect_points_to_info_r, ai, true);
497 VARRAY_PUSH_TREE (ai->processed_ptrs, ptr);
498 }
499 }
500
501
502 /* Helper for ptr_is_dereferenced_by. Called by walk_tree to look for
503 INDIRECT_REF nodes for the pointer passed in DATA. */
504
505 static tree
506 find_ptr_dereference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
507 {
508 tree ptr = (tree) data;
509
510 if (TREE_CODE (*tp) == INDIRECT_REF
511 && TREE_OPERAND (*tp, 0) == ptr)
512 return *tp;
513
514 return NULL_TREE;
515 }
516
517
518 /* Return true if STMT contains INDIRECT_REF <PTR>. *IS_STORE is set
519 to 'true' if the dereference is on the LHS of an assignment. */
520
521 static bool
522 ptr_is_dereferenced_by (tree ptr, tree stmt, bool *is_store)
523 {
524 *is_store = false;
525
526 if (TREE_CODE (stmt) == MODIFY_EXPR
527 || (TREE_CODE (stmt) == RETURN_EXPR
528 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR))
529 {
530 tree e, lhs, rhs;
531
532 e = (TREE_CODE (stmt) == RETURN_EXPR) ? TREE_OPERAND (stmt, 0) : stmt;
533 lhs = TREE_OPERAND (e, 0);
534 rhs = TREE_OPERAND (e, 1);
535
536 if (EXPR_P (lhs)
537 && walk_tree (&lhs, find_ptr_dereference, ptr, NULL))
538 {
539 *is_store = true;
540 return true;
541 }
542 else if (EXPR_P (rhs)
543 && walk_tree (&rhs, find_ptr_dereference, ptr, NULL))
544 {
545 return true;
546 }
547 }
548 else if (TREE_CODE (stmt) == ASM_EXPR)
549 {
550 if (walk_tree (&ASM_OUTPUTS (stmt), find_ptr_dereference, ptr, NULL)
551 || walk_tree (&ASM_CLOBBERS (stmt), find_ptr_dereference, ptr, NULL))
552 {
553 *is_store = true;
554 return true;
555 }
556 else if (walk_tree (&ASM_INPUTS (stmt), find_ptr_dereference, ptr, NULL))
557 {
558 return true;
559 }
560 }
561
562 return false;
563 }
564
565
/* Traverse use-def links for all the pointers in the program to collect
   address escape and points-to information.

   This is loosely based on the same idea described in R. Hasti and S.
   Horwitz, ``Using static single assignment form to improve
   flow-insensitive pointer analysis,'' in SIGPLAN Conference on
   Programming Language Design and Implementation, pp. 97-105, 1998.

   Side effects: fills in AI's addresses_needed, num_references,
   written_vars and dereferenced_ptrs_{store,load}; sets per-SSA-name
   ptr_info flags; marks call-clobbered variables; and re-marks every
   statement as modified so operands are re-scanned later.  */

static void
compute_points_to_and_addr_escape (struct alias_info *ai)
{
  basic_block bb;
  size_t i;

  timevar_push (TV_TREE_PTA);

  FOR_EACH_BB (bb)
    {
      bb_ann_t block_ann = bb_ann (bb);
      block_stmt_iterator si;

      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	{
	  use_optype uses;
	  def_optype defs;
	  v_may_def_optype v_may_defs;
	  v_must_def_optype v_must_defs;
	  stmt_ann_t ann;
	  bitmap addr_taken;
	  tree stmt = bsi_stmt (si);
	  /* Also bumps AI->NUM_CALLS_FOUND when STMT contains a call.  */
	  bool stmt_escapes_p = is_escape_site (stmt, &ai->num_calls_found);

	  /* Mark all the variables whose address are taken by the
	     statement.  Note that this will miss all the addresses taken
	     in PHI nodes (those are discovered while following the use-def
	     chains).  */
	  get_stmt_operands (stmt);
	  addr_taken = addresses_taken (stmt);
	  if (addr_taken)
	    EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
		{
		  tree var = referenced_var (i);
		  bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
		  if (stmt_escapes_p)
		    mark_call_clobbered (var);
		});

	  if (stmt_escapes_p)
	    block_ann->has_escape_site = 1;

	  /* Special case for silly ADDR_EXPR tricks
	     (gcc.c-torture/unsorted/pass.c).  If this statement is an
	     assignment to a non-pointer variable and the RHS takes the
	     address of a variable, assume that the variable on the RHS is
	     call-clobbered.  We could add the LHS to the list of
	     "pointers" and follow it to see if it really escapes, but it's
	     not worth the pain.  */
	  if (addr_taken
	      && TREE_CODE (stmt) == MODIFY_EXPR
	      && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0))))
	    EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
		{
		  tree var = referenced_var (i);
		  mark_call_clobbered (var);
		});

	  /* Process every real USE operand of the statement.  */
	  ann = stmt_ann (stmt);
	  uses = USE_OPS (ann);
	  for (i = 0; i < NUM_USES (uses); i++)
	    {
	      tree op = USE_OP (uses, i);
	      var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
	      struct ptr_info_def *pi;
	      bool is_store;

	      /* If the operand's variable may be aliased, keep track
		 of how many times we've referenced it.  This is used
		 for alias grouping in compute_flow_sensitive_aliasing.
		 Note that we don't need to grow AI->NUM_REFERENCES
		 because we are processing regular variables, not
		 memory tags (the array's initial size is set to
		 NUM_REFERENCED_VARS).  */
	      if (may_be_aliased (SSA_NAME_VAR (op)))
		(VARRAY_UINT (ai->num_references, v_ann->uid))++;

	      /* Only pointer uses get points-to processing.  */
	      if (!POINTER_TYPE_P (TREE_TYPE (op)))
		continue;

	      collect_points_to_info_for (ai, op);

	      pi = SSA_NAME_PTR_INFO (op);
	      if (ptr_is_dereferenced_by (op, stmt, &is_store))
		{
		  /* Mark OP as dereferenced.  In a subsequent pass,
		     dereferenced pointers that point to a set of
		     variables will be assigned a name tag to alias
		     all the variables OP points to.  */
		  pi->is_dereferenced = 1;

		  /* Keep track of how many time we've dereferenced each
		     pointer.  Again, we don't need to grow
		     AI->NUM_REFERENCES because we're processing
		     existing program variables.  */
		  (VARRAY_UINT (ai->num_references, v_ann->uid))++;

		  /* If this is a store operation, mark OP as being
		     dereferenced to store, otherwise mark it as being
		     dereferenced to load.  */
		  if (is_store)
		    bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
		  else
		    bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
		}
	      else if (stmt_escapes_p)
		{
		  /* Note that even if STMT is an escape point, pointer OP
		     will not escape if it is being dereferenced.  That's
		     why we only check for escape points if OP is not
		     dereferenced by STMT.  */
		  pi->value_escapes_p = 1;

		  /* If the statement makes a function call, assume
		     that pointer OP will be dereferenced in a store
		     operation inside the called function.  */
		  if (get_call_expr_in (stmt))
		    {
		      bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
		      pi->is_dereferenced = 1;
		    }
		}
	    }

	  /* Update reference counter for definitions to any
	     potentially aliased variable.  This is used in the alias
	     grouping heuristics.  */
	  defs = DEF_OPS (ann);
	  for (i = 0; i < NUM_DEFS (defs); i++)
	    {
	      tree op = DEF_OP (defs, i);
	      tree var = SSA_NAME_VAR (op);
	      var_ann_t ann = var_ann (var);
	      bitmap_set_bit (ai->written_vars, ann->uid);
	      if (may_be_aliased (var))
		(VARRAY_UINT (ai->num_references, ann->uid))++;
	    }

	  /* Mark variables in V_MAY_DEF operands as being written to.  */
	  v_may_defs = V_MAY_DEF_OPS (ann);
	  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	    {
	      tree op = V_MAY_DEF_OP (v_may_defs, i);
	      tree var = SSA_NAME_VAR (op);
	      var_ann_t ann = var_ann (var);
	      bitmap_set_bit (ai->written_vars, ann->uid);
	    }

	  /* Mark variables in V_MUST_DEF operands as being written to.  */
	  v_must_defs = V_MUST_DEF_OPS (ann);
	  for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	    {
	      tree op = V_MUST_DEF_OP (v_must_defs, i);
	      tree var = SSA_NAME_VAR (op);
	      var_ann_t ann = var_ann (var);
	      bitmap_set_bit (ai->written_vars, ann->uid);
	    }

	  /* After promoting variables and computing aliasing we will
	     need to re-scan most statements.  FIXME: Try to minimize the
	     number of statements re-scanned.  It's not really necessary to
	     re-scan *all* statements.  */
	  modify_stmt (stmt);
	}
    }

  timevar_pop (TV_TREE_PTA);
}
742
743
/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.

   Every new or changed name tag is added to VARS_TO_RENAME so the
   SSA renamer picks it up afterwards.  */

static void
create_name_tags (struct alias_info *ai)
{
  size_t i;

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      if (pi->pt_vars
	  && bitmap_first_set_bit (pi->pt_vars) >= 0)
	{
	  size_t j;
	  tree old_name_tag = pi->name_mem_tag;

	  /* If PTR points to a set of variables, check if we don't
	     have another pointer Q with the same points-to set before
	     creating a tag.  If so, use Q's tag instead of creating a
	     new one.

	     This is important for not creating unnecessary symbols
	     and also for copy propagation.  If we ever need to
	     propagate PTR into Q or vice-versa, we would run into
	     problems if they both had different name tags because
	     they would have different SSA version numbers (which
	     would force us to take the name tags in and out of SSA).  */
	  for (j = 0; j < i; j++)
	    {
	      tree q = VARRAY_TREE (ai->processed_ptrs, j);
	      struct ptr_info_def *qi = SSA_NAME_PTR_INFO (q);

	      if (qi
		  && qi->pt_vars
		  && qi->name_mem_tag
		  && bitmap_equal_p (pi->pt_vars, qi->pt_vars))
		{
		  pi->name_mem_tag = qi->name_mem_tag;
		  break;
		}
	    }

	  /* If we didn't find a pointer with the same points-to set
	     as PTR, create a new name tag if needed.  */
	  if (pi->name_mem_tag == NULL_TREE)
	    pi->name_mem_tag = get_nmt_for (ptr);

	  /* If the new name tag computed for PTR is different than
	     the old name tag that it used to have, then the old tag
	     needs to be removed from the IL, so we mark it for
	     renaming.  */
	  if (old_name_tag && old_name_tag != pi->name_mem_tag)
	    bitmap_set_bit (vars_to_rename, var_ann (old_name_tag)->uid);
	}
      else if (pi->pt_malloc)
	{
	  /* Otherwise, create a unique name tag for this pointer.  */
	  pi->name_mem_tag = get_nmt_for (ptr);
	}
      else
	{
	  /* Only pointers that may point to malloc or other variables
	     may receive a name tag.  If the pointer does not point to
	     a known spot, we should use type tags.  */
	  set_pt_anything (ptr);
	  continue;
	}

      /* Mark the new name tag for renaming.  */
      bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
    }
}
833
834
835
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Assign name tags first; the loop below relies on them.  */
  create_name_tags (ai);

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      size_t j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));

      if (pi->value_escapes_p || pi->pt_anything)
	{
	  /* If PTR escapes or may point to anything, then its associated
	     memory tags and pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag);

	  if (v_ann->type_mem_tag)
	    mark_call_clobbered (v_ann->type_mem_tag);

	  if (pi->pt_vars)
	    EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
		mark_call_clobbered (referenced_var (j)));
	}

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
	    add_may_alias (pi->name_mem_tag, referenced_var (j)));

      /* If the name tag is call clobbered, so is the type tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && v_ann->type_mem_tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (v_ann->type_mem_tag);
    }
}
888
889
/* Compute type-based alias sets.  Traverse all the pointers and
   addressable variables found in setup_pointers_and_addressables.

   For every pointer P in AI->POINTERS and addressable variable V in
   AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's type
   memory tag (TMT) if their alias sets conflict.  V is then marked as
   an alias tag so that the operand scanner knows that statements
   containing V have aliased operands.  */

static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --param max-aliased-vops, we enable alias
     grouping.  */
  ai->total_alias_vops = 0;

  /* For every pointer P, determine which addressable variables may alias
     with P's type memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = var_ann (p_map->var)->type_mem_tag;
      var_ann_t tag_ann = var_ann (tag);

      p_map->total_alias_vops = 0;
      p_map->may_aliases = sbitmap_alloc (num_referenced_vars);
      sbitmap_zero (p_map->may_aliases);

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  tree var;
	  bool tag_stored_p, var_stored_p;
	  
	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.  */
	  tag_stored_p = bitmap_bit_p (ai->written_vars, tag_ann->uid)
			 || is_call_clobbered (tag);
	  var_stored_p = bitmap_bit_p (ai->written_vars, v_ann->uid)
			 || is_call_clobbered (var);
	  if (!tag_stored_p && !var_stored_p)
	    continue;
	     
	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
	    {
	      size_t num_tag_refs, num_var_refs;

	      num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
	      num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid);

	      /* Add VAR to TAG's may-aliases set.  */
	      add_may_alias (tag, var);

	      /* Update the total number of virtual operands due to
		 aliasing.  Since we are adding one more alias to TAG's
		 may-aliases set, the total number of virtual operands due
		 to aliasing will be increased by the number of references
		 made to VAR and TAG (every reference to TAG will also
		 count as a reference to VAR).  */
	      ai->total_alias_vops += (num_var_refs + num_tag_refs);
	      p_map->total_alias_vops += (num_var_refs + num_tag_refs);

	      /* Update the bitmap used to represent TAG's alias set
		 in case we need to group aliases.  */
	      SET_BIT (p_map->may_aliases, var_ann (var)->uid);
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file, "%s: Total number of aliased vops: %ld\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);
}
980
981
982 /* Comparison function for qsort used in group_aliases. */
983
984 static int
985 total_alias_vops_cmp (const void *p, const void *q)
986 {
987 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
988 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
989 long n1 = (*p1)->total_alias_vops;
990 long n2 = (*p2)->total_alias_vops;
991
992 /* We want to sort in descending order. */
993 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
994 }
995
/* Group all the aliases for TAG to make TAG represent all the
   variables in its alias set.  Update the total number
   of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS).  This
   function will make TAG be the unique alias tag for all the
   variables in its may-aliases.  So, given:

   	may-aliases(TAG) = { V1, V2, V3 }

   This function will group the variables into:

   	may-aliases(V1) = { TAG }
	may-aliases(V2) = { TAG }
	may-aliases(V3) = { TAG }  */

static void
group_aliases_into (tree tag, sbitmap tag_aliases, struct alias_info *ai)
{
  size_t i;
  var_ann_t tag_ann = var_ann (tag);
  size_t num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);

  /* For each variable in TAG's alias set, clear its own aliases and
     make TAG its single representative.  */
  EXECUTE_IF_SET_IN_SBITMAP (tag_aliases, 0, i,
    {
      tree var = referenced_var (i);
      var_ann_t ann = var_ann (var);

      /* Make TAG the unique alias of VAR.  */
      ann->is_alias_tag = 0;
      ann->may_aliases = NULL;

      /* Note that VAR and TAG may be the same if the function has no
	 addressable variables (see the discussion at the end of
	 setup_pointers_and_addressables).  */
      if (var != tag)
	add_may_alias (var, tag);

      /* Reduce total number of virtual operands contributed
	 by TAG on behalf of VAR.  Notice that the references to VAR
	 itself won't be removed.  We will merely replace them with
	 references to TAG.  */
      ai->total_alias_vops -= num_tag_refs;
    });

  /* We have reduced the number of virtual operands that TAG makes on
     behalf of all the variables formerly aliased with it.  However,
     we have also "removed" all the virtual operands for TAG itself,
     so we add them back.  */
  ai->total_alias_vops += num_tag_refs;

  /* TAG no longer has any aliases.  */
  tag_ann->may_aliases = NULL;
}
1048
1049
1050 /* Group may-aliases sets to reduce the number of virtual operands due
1051 to aliasing.
1052
1053 1- Sort the list of pointers in decreasing number of contributed
1054 virtual operands.
1055
1056 2- Take the first entry in AI->POINTERS and revert the role of
1057 the memory tag and its aliases. Usually, whenever an aliased
1058 variable Vi is found to alias with a memory tag T, we add Vi
1059 to the may-aliases set for T. Meaning that after alias
1060 analysis, we will have:
1061
1062 may-aliases(T) = { V1, V2, V3, ..., Vn }
1063
1064 This means that every statement that references T, will get 'n'
1065 virtual operands for each of the Vi tags. But, when alias
1066 grouping is enabled, we make T an alias tag and add it to the
1067 alias set of all the Vi variables:
1068
1069 may-aliases(V1) = { T }
1070 may-aliases(V2) = { T }
1071 ...
1072 may-aliases(Vn) = { T }
1073
1074 This has two effects: (a) statements referencing T will only get
1075 a single virtual operand, and, (b) all the variables Vi will now
1076 appear to alias each other. So, we lose alias precision to
1077 improve compile time. But, in theory, a program with such a high
1078 level of aliasing should not be very optimizable in the first
1079 place.
1080
1081 3- Since variables may be in the alias set of more than one
1082 memory tag, the grouping done in step (2) needs to be extended
1083 to all the memory tags that have a non-empty intersection with
1084 the may-aliases set of tag T. For instance, if we originally
1085 had these may-aliases sets:
1086
1087 may-aliases(T) = { V1, V2, V3 }
1088 may-aliases(R) = { V2, V4 }
1089
1090 In step (2) we would have reverted the aliases for T as:
1091
1092 may-aliases(V1) = { T }
1093 may-aliases(V2) = { T }
1094 may-aliases(V3) = { T }
1095
1096 But note that now V2 is no longer aliased with R. We could
1097 add R to may-aliases(V2), but we are in the process of
1098 grouping aliases to reduce virtual operands so what we do is
1099 add V4 to the grouping to obtain:
1100
1101 may-aliases(V1) = { T }
1102 may-aliases(V2) = { T }
1103 may-aliases(V3) = { T }
1104 may-aliases(V4) = { T }
1105
1106 4- If the total number of virtual operands due to aliasing is
1107 still above the threshold set by max-alias-vops, go back to (2). */
1108
static void
group_aliases (struct alias_info *ai)
{
  size_t i;
  sbitmap res;

  /* Sort the POINTERS array in descending order of contributed
     virtual operands.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
	 total_alias_vops_cmp);

  /* Scratch bitmap used to intersect the alias sets of two tags.  */
  res = sbitmap_alloc (num_referenced_vars);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
      sbitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)
	continue;

      /* See if TAG1 had any aliases in common with other type tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  sbitmap tag2_aliases = ai->pointers[j]->may_aliases;

	  /* Non-empty intersection means TAG2 must join TAG1's group.  */
	  sbitmap_a_and_b (res, tag1_aliases, tag2_aliases);
	  if (sbitmap_first_set_bit (res) >= 0)
	    {
	      tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;

	      /* Fold TAG2's aliases into TAG1's set.  */
	      sbitmap_a_or_b (tag1_aliases, tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      sbitmap_zero (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;
	    }
	}

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)
	break;
    }

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = TMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to TMT.20 ...
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'TMT.20' in between.  To prevent this we have to
     replace 'a' with 'TMT.20' in the name tag of p_5.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      size_t j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      varray_type aliases;

      if (name_tag == NULL_TREE)
	continue;

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; aliases && j < VARRAY_ACTIVE_SIZE (aliases); j++)
	{
	  tree alias = VARRAY_TREE (aliases, j);
	  var_ann_t ann = var_ann (alias);

	  /* A grouped variable is a non-tag that now has exactly one
	     alias (its representative tag); substitute the tag for
	     the variable in the name tag's alias list.  */
	  if (ann->mem_tag_kind == NOT_A_TAG && ann->may_aliases)
	    {
	      tree new_alias;

#if defined ENABLE_CHECKING
	      if (VARRAY_ACTIVE_SIZE (ann->may_aliases) != 1)
		abort ();
#endif
	      new_alias = VARRAY_TREE (ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);
	    }
	}
    }

  sbitmap_free (res);

  if (dump_file)
    fprintf (dump_file,
	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}
1223
1224
1225 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1226
1227 static void
1228 create_alias_map_for (tree var, struct alias_info *ai)
1229 {
1230 struct alias_map_d *alias_map;
1231 alias_map = xcalloc (1, sizeof (*alias_map));
1232 alias_map->var = var;
1233 alias_map->set = get_alias_set (var);
1234 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1235 }
1236
1237
/* Create memory tags for all the dereferenced pointers and build the
   ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
   sets.  Based on the address escape and points-to information collected
   earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
   variables whose address is not needed anymore.  */

static void
setup_pointers_and_addressables (struct alias_info *ai)
{
  size_t i, n_vars, num_addressable_vars, num_pointers;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;
  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);

      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    bitmap_set_bit (ai->dereferenced_ptrs_store, var_ann (var)->uid);

	  num_pointers++;
	}
    }

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique type tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = xcalloc (num_addressable_vars,
				  sizeof (struct alias_map_d *));
  ai->pointers = xcalloc (num_pointers, sizeof (struct alias_map_d *));
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Since we will be creating type memory tags within this loop, cache the
     value of NUM_REFERENCED_VARS to avoid processing the additional tags
     unnecessarily.  */
  n_vars = num_referenced_vars;

  for (i = 0; i < n_vars; i++)
    {
      tree var = referenced_var (i);
      var_ann_t v_ann = var_ann (var);

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_may_aliases.  Similarly, type memory tags are already
	 accounted for when we process their associated pointer.  */
      if (v_ann->mem_tag_kind != NOT_A_TAG)
	continue;

      /* Remove the ADDRESSABLE flag from every addressable variable whose
	 address is not needed anymore.  This is caused by the propagation
	 of ADDR_EXPR constants into INDIRECT_REF expressions and the
	 removal of dead pointer assignments done by the early scalar
	 cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	{
	  if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
	      && v_ann->mem_tag_kind == NOT_A_TAG
	      && !is_global_var (var))
	    {
	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
	      mark_non_addressable (var);

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      bitmap_set_bit (vars_to_rename, v_ann->uid);
	    }
	  else
	    {
	      /* Add the variable to the set of addressables.  Mostly
		 used when scanning operands for ASM_EXPRs that
		 clobber memory.  In those cases, we need to clobber
		 all call-clobbered variables and all addressables.  */
	      bitmap_set_bit (addressable_vars, v_ann->uid);
	    }
	}

      /* Global variables and addressable locals may be aliased.  Create an
	 entry in ADDRESSABLE_VARS for VAR.  */
      if (may_be_aliased (var))
	{
	  create_alias_map_for (var, ai);
	  bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
	}

      /* Add pointer variables that have been dereferenced to the POINTERS
	 array and create a type memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  if ((bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid)
	       || bitmap_bit_p (ai->dereferenced_ptrs_load, v_ann->uid)))
	    {
	      tree tag;
	      var_ann_t t_ann;

	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_tmt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The type tag will need to be renamed into SSA
		 afterwards.  Note that we cannot do this inside
		 get_tmt_for because aliasing may run multiple times
		 and we only create type tags the first time.  */
	      bitmap_set_bit (vars_to_rename, t_ann->uid);

	      /* Associate the tag with pointer VAR.  */
	      v_ann->type_mem_tag = tag;

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid))
		bitmap_set_bit (ai->written_vars, t_ann->uid);

	      /* If pointer VAR is a global variable or a PARM_DECL,
		 then its memory tag should be considered a global
		 variable.  */
	      if (TREE_CODE (var) == PARM_DECL || is_global_var (var))
		mark_call_clobbered (tag);

	      /* All the dereferences of pointer VAR count as
		 references of TAG.  Since TAG can be associated with
		 several pointers, add the dereferences of VAR to the
		 TAG.  We may need to grow AI->NUM_REFERENCES because
		 we have been adding name and type tags.  */
	      if (t_ann->uid >= VARRAY_SIZE (ai->num_references))
		VARRAY_GROW (ai->num_references, t_ann->uid + 10);

	      VARRAY_UINT (ai->num_references, t_ann->uid)
		+= VARRAY_UINT (ai->num_references, v_ann->uid);
	    }
	  else
	    {
	      /* The pointer has not been dereferenced.  If it had a
		 type memory tag, remove it and mark the old tag for
		 renaming to remove it out of the IL.  */
	      var_ann_t ann = var_ann (var);
	      tree tag = ann->type_mem_tag;
	      if (tag)
		{
		  bitmap_set_bit (vars_to_rename, var_ann (tag)->uid);
		  ann->type_mem_tag = NULL_TREE;
		}
	    }
	}
    }

  /* If we found no addressable variables, but we have more than one
     pointer, we will need to check for conflicts between the
     pointers.  Otherwise, we would miss alias relations as in
     testsuite/gcc.dg/tree-ssa/20040319-1.c:

		struct bar { int count;  int *arr;};

		void foo (struct bar *b)
		{
		  b->count = 0;
		  *(b->arr) = 2;
		  if (b->count == 0)
		    abort ();
		}

     b->count and *(b->arr) could be aliased if b->arr == &b->count.
     To do this, we add all the memory tags for the pointers in
     AI->POINTERS to AI->ADDRESSABLE_VARS, so that
     compute_flow_insensitive_aliasing will naturally compare every
     pointer to every type tag.  */
  if (ai->num_addressable_vars == 0
      && ai->num_pointers > 1)
    {
      free (ai->addressable_vars);
      ai->addressable_vars = xcalloc (ai->num_pointers,
				      sizeof (struct alias_map_d *));
      ai->num_addressable_vars = 0;
      for (i = 0; i < ai->num_pointers; i++)
	{
	  struct alias_map_d *p = ai->pointers[i];
	  tree tag = var_ann (p->var)->type_mem_tag;
	  create_alias_map_for (tag, ai);
	}
    }
}
1433
1434
/* Determine whether to use .GLOBAL_VAR to model call clobbering semantics.  At
   every call site, we need to emit V_MAY_DEF expressions to represent the
   clobbering effects of the call for variables whose address escapes the
   current function.

   One approach is to group all call-clobbered variables into a single
   representative that is used as an alias of every call-clobbered variable
   (.GLOBAL_VAR).  This works well, but it ties the optimizer hands because
   references to any call clobbered variable is a reference to .GLOBAL_VAR.

   The second approach is to emit a clobbering V_MAY_DEF for every
   call-clobbered variable at call sites.  This is the preferred way in terms
   of optimization opportunities but it may create too many V_MAY_DEF operands
   if there are many call clobbered variables and function calls in the
   function.

   To decide whether or not to use .GLOBAL_VAR we multiply the number of
   function calls found by the number of call-clobbered variables.  If that
   product is beyond a certain threshold, as determined by the parameterized
   values shown below, we use .GLOBAL_VAR.

   FIXME.  This heuristic should be improved.  One idea is to use several
   .GLOBAL_VARs of different types instead of a single one.  The thresholds
   have been derived from a typical bootstrap cycle, including all target
   libraries.  Compile times were found increase by ~1% compared to using
   .GLOBAL_VAR.  */

static void
maybe_create_global_var (struct alias_info *ai)
{
  size_t i, n_clobbered;

  /* No need to create it, if we have one already.  */
  if (global_var == NULL_TREE)
    {
      /* Count all the call-clobbered variables.  */
      n_clobbered = 0;
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, n_clobbered++);

      /* Create .GLOBAL_VAR if we have too many call-clobbered
	 variables.  We also create .GLOBAL_VAR when there no
	 call-clobbered variables to prevent code motion
	 transformations from re-arranging function calls that may
	 have side effects.  For instance,

		foo ()
		{
		  int a = f ();
		  g ();
		  h (a);
		}

	 There are no call-clobbered variables in foo(), so it would
	 be entirely possible for a pass to want to move the call to
	 f() after the call to g().  If f() has side effects, that
	 would be wrong.  Creating .GLOBAL_VAR in this case will
	 insert VDEFs for it and prevent such transformations.  */
      if (n_clobbered == 0
	  || ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
	create_global_var ();
    }

  /* If the function has calls to clobbering functions and .GLOBAL_VAR has
     been created, make it an alias for all call-clobbered variables.  */
  if (global_var)
    EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
      {
	tree var = referenced_var (i);
	if (var != global_var)
	  {
	    add_may_alias (var, global_var);
	    /* VAR gained an alias, so it must be renamed into SSA.  */
	    bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
	  }
      });
}
1510
1511
/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
	This is needed because when checking for type conflicts we are
	interested in the alias set of the memory location pointed-to by
	PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.

   Every early-return below bumps a pair of alias_stats counters so the
   effectiveness of each disambiguation stage can be reported.  */

static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set)
{
  tree mem;
  var_ann_t v_ann, m_ann;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  */
  mem = var_ann (ptr)->type_mem_tag;
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  v_ann = var_ann (var);
  m_ann = var_ann (mem);

#if defined ENABLE_CHECKING
  if (m_ann->mem_tag_kind != TYPE_TAG)
    abort ();
#endif

  alias_stats.tbaa_queries++;

  /* If VAR is a pointer with the same alias set as PTR, then dereferencing
     PTR can't possibly affect VAR.  Note, that we are specifically testing
     for PTR's alias set here, not its pointed-to type.  We also can't
     do this check with relaxed aliasing enabled.  */
  if (POINTER_TYPE_P (TREE_TYPE (var))
      && var_alias_set != 0)
    {
      HOST_WIDE_INT ptr_alias_set = get_alias_set (ptr);
      if (ptr_alias_set == var_alias_set)
	{
	  alias_stats.alias_noalias++;
	  alias_stats.tbaa_resolved++;
	  return false;
	}
    }

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
    {
      /* Handle aliases to structure fields.  If either VAR or MEM are
	 aggregate types, they may not have conflicting types, but one of
	 the structures could contain a pointer to the other one.

	 For instance, given

		MEM -> struct P *p;
		VAR -> struct Q *q;

	 It may happen that '*p' and '*q' can't alias because 'struct P'
	 and 'struct Q' have non-conflicting alias sets.  However, it could
	 happen that one of the fields in 'struct P' is a 'struct Q *' or
	 vice-versa.

	 Therefore, we also need to check if 'struct P' aliases 'struct Q *'
	 or 'struct Q' aliases 'struct P *'.  Notice, that since GIMPLE
	 does not have more than one-level pointers, we don't need to
	 recurse into the structures.  */
      if (AGGREGATE_TYPE_P (TREE_TYPE (mem))
	  || AGGREGATE_TYPE_P (TREE_TYPE (var)))
	{
	  tree ptr_to_var;

	  /* For arrays, the interesting pointer type is a pointer to
	     the element type.  */
	  if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	    ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (var)));
	  else
	    ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (var));

	  /* If no pointer-to VAR exists, then MEM can't alias VAR.  */
	  if (ptr_to_var == NULL_TREE)
	    {
	      alias_stats.alias_noalias++;
	      alias_stats.tbaa_resolved++;
	      return false;
	    }

	  /* If MEM doesn't alias a pointer to VAR and VAR doesn't alias
	     PTR, then PTR can't alias VAR.  */
	  if (!alias_sets_conflict_p (mem_alias_set, get_alias_set (ptr_to_var))
	      && !alias_sets_conflict_p (var_alias_set, get_alias_set (ptr)))
	    {
	      alias_stats.alias_noalias++;
	      alias_stats.tbaa_resolved++;
	      return false;
	    }
	}
      else
	{
	  alias_stats.alias_noalias++;
	  alias_stats.tbaa_resolved++;
	  return false;
	}
    }

  if (flag_tree_points_to != PTA_NONE)
    alias_stats.pta_queries++;

  /* If -ftree-points-to is given, check if PTR may point to VAR.  */
  if (flag_tree_points_to == PTA_ANDERSEN
      && !ptr_may_alias_var (ptr, var))
    {
      alias_stats.alias_noalias++;
      alias_stats.pta_resolved++;
      return false;
    }

  alias_stats.alias_mayalias++;
  return true;
}
1638
1639
1640 /* Add ALIAS to the set of variables that may alias VAR. */
1641
1642 static void
1643 add_may_alias (tree var, tree alias)
1644 {
1645 size_t i;
1646 var_ann_t v_ann = get_var_ann (var);
1647 var_ann_t a_ann = get_var_ann (alias);
1648
1649 #if defined ENABLE_CHECKING
1650 if (var == alias)
1651 abort ();
1652 #endif
1653
1654 if (v_ann->may_aliases == NULL)
1655 VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
1656
1657 /* Avoid adding duplicates. */
1658 for (i = 0; i < VARRAY_ACTIVE_SIZE (v_ann->may_aliases); i++)
1659 if (alias == VARRAY_TREE (v_ann->may_aliases, i))
1660 return;
1661
1662 /* If VAR is a call-clobbered variable, so is its new ALIAS.
1663 FIXME, call-clobbering should only depend on whether an address
1664 escapes. It should be independent of aliasing. */
1665 if (is_call_clobbered (var))
1666 mark_call_clobbered (alias);
1667
1668 /* Likewise. If ALIAS is call-clobbered, so is VAR. */
1669 else if (is_call_clobbered (alias))
1670 mark_call_clobbered (var);
1671
1672 VARRAY_PUSH_TREE (v_ann->may_aliases, alias);
1673 a_ann->is_alias_tag = 1;
1674 }
1675
1676
1677 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1678
1679 static void
1680 replace_may_alias (tree var, size_t i, tree new_alias)
1681 {
1682 var_ann_t v_ann = var_ann (var);
1683 VARRAY_TREE (v_ann->may_aliases, i) = new_alias;
1684
1685 /* If VAR is a call-clobbered variable, so is NEW_ALIAS.
1686 FIXME, call-clobbering should only depend on whether an address
1687 escapes. It should be independent of aliasing. */
1688 if (is_call_clobbered (var))
1689 mark_call_clobbered (new_alias);
1690
1691 /* Likewise. If NEW_ALIAS is call-clobbered, so is VAR. */
1692 else if (is_call_clobbered (new_alias))
1693 mark_call_clobbered (var);
1694 }
1695
1696
1697 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1698
1699 static void
1700 set_pt_anything (tree ptr)
1701 {
1702 struct ptr_info_def *pi = get_ptr_info (ptr);
1703
1704 pi->pt_anything = 1;
1705 pi->pt_malloc = 0;
1706
1707 /* The pointer used to have a name tag, but we now found it pointing
1708 to an arbitrary location. The name tag needs to be renamed and
1709 disassociated from PTR. */
1710 if (pi->name_mem_tag)
1711 {
1712 bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
1713 pi->name_mem_tag = NULL_TREE;
1714 }
1715 }
1716
1717
1718 /* Mark pointer PTR as pointing to a malloc'd memory area. */
1719
1720 static void
1721 set_pt_malloc (tree ptr)
1722 {
1723 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1724
1725 /* If the pointer has already been found to point to arbitrary
1726 memory locations, it is unsafe to mark it as pointing to malloc. */
1727 if (pi->pt_anything)
1728 return;
1729
1730 pi->pt_malloc = 1;
1731 }
1732
1733
1734 /* Given two pointers DEST and ORIG. Merge the points-to information in
1735 ORIG into DEST. AI is as in collect_points_to_info. */
1736
1737 static void
1738 merge_pointed_to_info (struct alias_info *ai, tree dest, tree orig)
1739 {
1740 struct ptr_info_def *dest_pi, *orig_pi;
1741
1742 /* Make sure we have points-to information for ORIG. */
1743 collect_points_to_info_for (ai, orig);
1744
1745 dest_pi = get_ptr_info (dest);
1746 orig_pi = SSA_NAME_PTR_INFO (orig);
1747
1748 if (orig_pi)
1749 {
1750 /* Notice that we never merge PT_MALLOC. This attribute is only
1751 true if the pointer is the result of a malloc() call.
1752 Otherwise, we can end up in this situation:
1753
1754 P_i = malloc ();
1755 ...
1756 P_j = P_i + X;
1757
1758 P_j would be marked as PT_MALLOC, which is wrong because
1759 PT_MALLOC implies that the pointer may not point to another
1760 variable.
1761
1762 FIXME 1: Subsequent analysis may determine that P_j
1763 cannot alias anything else, but we are being conservative
1764 here.
1765
1766 FIXME 2: If the merging comes from a copy assignment, we
1767 ought to merge PT_MALLOC, but then both pointers would end up
1768 getting different name tags because create_name_tags is not
1769 smart enough to determine that the two come from the same
1770 malloc call. Copy propagation before aliasing should cure
1771 this. */
1772 dest_pi->pt_malloc = 0;
1773
1774 if (orig_pi->pt_malloc || orig_pi->pt_anything)
1775 set_pt_anything (dest);
1776
1777 if (!dest_pi->pt_anything
1778 && orig_pi->pt_vars
1779 && bitmap_first_set_bit (orig_pi->pt_vars) >= 0)
1780 {
1781 if (dest_pi->pt_vars == NULL)
1782 {
1783 dest_pi->pt_vars = BITMAP_GGC_ALLOC ();
1784 bitmap_copy (dest_pi->pt_vars, orig_pi->pt_vars);
1785 }
1786 else
1787 bitmap_a_or_b (dest_pi->pt_vars,
1788 dest_pi->pt_vars,
1789 orig_pi->pt_vars);
1790 }
1791 }
1792 else
1793 set_pt_anything (dest);
1794 }
1795
1796
1797 /* Add VALUE to the list of expressions pointed-to by PTR. */
1798
1799 static void
1800 add_pointed_to_expr (tree ptr, tree value)
1801 {
1802 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1803 value = TREE_OPERAND (value, 0);
1804
1805 #if defined ENABLE_CHECKING
1806 /* Pointer variables should have been handled by merge_pointed_to_info. */
1807 if (TREE_CODE (value) == SSA_NAME
1808 && POINTER_TYPE_P (TREE_TYPE (value)))
1809 abort ();
1810 #endif
1811
1812 get_ptr_info (ptr);
1813
1814 /* If VALUE is the result of a malloc-like call, then the area pointed to
1815 PTR is guaranteed to not alias with anything else. */
1816 if (TREE_CODE (value) == CALL_EXPR
1817 && (call_expr_flags (value) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
1818 set_pt_malloc (ptr);
1819 else
1820 set_pt_anything (ptr);
1821
1822 if (dump_file)
1823 {
1824 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1825
1826 fprintf (dump_file, "Pointer ");
1827 print_generic_expr (dump_file, ptr, dump_flags);
1828 fprintf (dump_file, " points to ");
1829 if (pi->pt_malloc)
1830 fprintf (dump_file, "malloc space: ");
1831 else
1832 fprintf (dump_file, "an arbitrary address: ");
1833 print_generic_expr (dump_file, value, dump_flags);
1834 fprintf (dump_file, "\n");
1835 }
1836 }
1837
1838
1839 /* If VALUE is of the form &DECL, add DECL to the set of variables
1840 pointed-to by PTR. Otherwise, add VALUE as a pointed-to expression by
1841 PTR. AI is as in collect_points_to_info. */
1842
1843 static void
1844 add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
1845 {
1846 struct ptr_info_def *pi = get_ptr_info (ptr);
1847 tree pt_var;
1848 size_t uid;
1849
1850 #if defined ENABLE_CHECKING
1851 if (TREE_CODE (value) != ADDR_EXPR)
1852 abort ();
1853 #endif
1854
1855 pt_var = TREE_OPERAND (value, 0);
1856 if (TREE_CODE_CLASS (TREE_CODE (pt_var)) == 'r')
1857 pt_var = get_base_address (pt_var);
1858
1859 if (pt_var && SSA_VAR_P (pt_var))
1860 {
1861 uid = var_ann (pt_var)->uid;
1862 bitmap_set_bit (ai->addresses_needed, uid);
1863
1864 if (pi->pt_vars == NULL)
1865 pi->pt_vars = BITMAP_GGC_ALLOC ();
1866 bitmap_set_bit (pi->pt_vars, uid);
1867
1868 /* If the variable is a global, mark the pointer as pointing to
1869 global memory (which will make its tag a global variable). */
1870 if (is_global_var (pt_var))
1871 pi->pt_global_mem = 1;
1872 }
1873 }
1874
1875
1876 /* Callback for walk_use_def_chains to gather points-to information from the
1877 SSA web.
1878
1879 VAR is an SSA variable or a GIMPLE expression.
1880
1881 STMT is the statement that generates the SSA variable or, if STMT is a
1882 PHI_NODE, VAR is one of the PHI arguments.
1883
1884 DATA is a pointer to a structure of type ALIAS_INFO. */
1885
static bool
collect_points_to_info_r (tree var, tree stmt, void *data)
{
  struct alias_info *ai = (struct alias_info *) data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      tree rhs = TREE_OPERAND (stmt, 1);
      STRIP_NOPS (rhs);

      /* Found P_i = ADDR_EXPR */
      if (TREE_CODE (rhs) == ADDR_EXPR)
	add_pointed_to_var (ai, var, rhs);

      /* Found P_i = Q_j.  */
      else if (TREE_CODE (rhs) == SSA_NAME
	       && POINTER_TYPE_P (TREE_TYPE (rhs)))
	merge_pointed_to_info (ai, var, rhs);

      /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
      else if (TREE_CODE (rhs) == PLUS_EXPR
	       || TREE_CODE (rhs) == MINUS_EXPR)
	{
	  tree op0 = TREE_OPERAND (rhs, 0);
	  tree op1 = TREE_OPERAND (rhs, 1);

	  /* Both operands may be of pointer type.  FIXME: Shouldn't
	     we just expect PTR + OFFSET always?  */
	  if (POINTER_TYPE_P (TREE_TYPE (op0)))
	    {
	      if (TREE_CODE (op0) == SSA_NAME)
		merge_pointed_to_info (ai, var, op0);
	      else if (TREE_CODE (op0) == ADDR_EXPR)
		add_pointed_to_var (ai, var, op0);
	      else
		add_pointed_to_expr (var, op0);
	    }

	  if (POINTER_TYPE_P (TREE_TYPE (op1)))
	    {
	      if (TREE_CODE (op1) == SSA_NAME)
		merge_pointed_to_info (ai, var, op1);
	      else if (TREE_CODE (op1) == ADDR_EXPR)
		add_pointed_to_var (ai, var, op1);
	      else
		add_pointed_to_expr (var, op1);
	    }

	  /* Neither operand is a pointer?  VAR can be pointing
	     anywhere.  FIXME: Is this right?  If we get here, we
	     found PTR = INT_CST + INT_CST.  */
	  if (!POINTER_TYPE_P (TREE_TYPE (op0))
	      && !POINTER_TYPE_P (TREE_TYPE (op1)))
	    add_pointed_to_expr (var, rhs);
	}

      /* Something else.  */
      else
	add_pointed_to_expr (var, rhs);
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    {
      /* Pointers defined by __asm__ statements can point anywhere.  */
      set_pt_anything (var);
    }
  else if (IS_EMPTY_STMT (stmt))
    {
      /* VAR is a default definition: there is no real statement that
	 creates it.  It is either an incoming PARM_DECL, a variable
	 with a static initializer, or an uninitialized local.  */
      tree decl = SSA_NAME_VAR (var);

      if (TREE_CODE (decl) == PARM_DECL)
	add_pointed_to_expr (var, decl);
      else if (DECL_INITIAL (decl))
	add_pointed_to_var (ai, var, DECL_INITIAL (decl));
      else
	add_pointed_to_expr (var, decl);
    }
  else if (TREE_CODE (stmt) == PHI_NODE)
    {
      /* If STMT is a PHI node, then VAR is one of its arguments.  The
	 variable that we are analyzing is the LHS of the PHI node.  */
      tree lhs = PHI_RESULT (stmt);

      if (TREE_CODE (var) == ADDR_EXPR)
	add_pointed_to_var (ai, lhs, var);
      else if (TREE_CODE (var) == SSA_NAME)
	merge_pointed_to_info (ai, lhs, var);
      else if (is_gimple_min_invariant (var))
	add_pointed_to_expr (lhs, var);
      else
	abort ();
    }
  else
    abort ();

  /* Returning false presumably tells walk_use_def_chains to keep
     walking -- confirm against the callback contract.  */
  return false;
}
1989
1990
1991 /* Return true if STMT is an "escape" site from the current function. Escape
   sites are those statements which might expose the address of a variable
1993 outside the current function. STMT is an escape site iff:
1994
1995 1- STMT is a function call, or
1996 2- STMT is an __asm__ expression, or
1997 3- STMT is an assignment to a non-local variable, or
1998 4- STMT is a return statement.
1999
2000 If NUM_CALLS_P is not NULL, the counter is incremented if STMT contains
2001 a function call. */
2002
2003 static bool
2004 is_escape_site (tree stmt, size_t *num_calls_p)
2005 {
2006 if (get_call_expr_in (stmt) != NULL_TREE)
2007 {
2008 if (num_calls_p)
2009 (*num_calls_p)++;
2010
2011 return true;
2012 }
2013 else if (TREE_CODE (stmt) == ASM_EXPR)
2014 return true;
2015 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2016 {
2017 tree lhs = TREE_OPERAND (stmt, 0);
2018
2019 /* Get to the base of _REF nodes. */
2020 if (TREE_CODE (lhs) != SSA_NAME)
2021 lhs = get_base_address (lhs);
2022
2023 /* If we couldn't recognize the LHS of the assignment, assume that it
2024 is a non-local store. */
2025 if (lhs == NULL_TREE)
2026 return true;
2027
2028 /* If the LHS is an SSA name, it can't possibly represent a non-local
2029 memory store. */
2030 if (TREE_CODE (lhs) == SSA_NAME)
2031 return false;
2032
2033 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2034 local variables we cannot be sure if it will escape, because we
2035 don't have information about objects not in SSA form. Need to
2036 implement something along the lines of
2037
2038 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2039 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2040 Conference on Object-Oriented Programming Systems, Languages, and
2041 Applications (OOPSLA), pp. 1-19, 1999. */
2042 return true;
2043 }
2044 else if (TREE_CODE (stmt) == RETURN_EXPR)
2045 return true;
2046
2047 return false;
2048 }
2049
2050
2051 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2052 is considered to represent all the pointers whose pointed-to types are
2053 in the same alias set class. Otherwise, the tag represents a single
2054 SSA_NAME pointer variable. */
2055
2056 static tree
2057 create_memory_tag (tree type, bool is_type_tag)
2058 {
2059 var_ann_t ann;
2060 tree tag = create_tmp_var_raw (type, (is_type_tag) ? "TMT" : "NMT");
2061
2062 /* By default, memory tags are local variables. Alias analysis will
2063 determine whether they should be considered globals. */
2064 DECL_CONTEXT (tag) = current_function_decl;
2065
2066 /* If the pointed-to type is volatile, so is the tag. */
2067 TREE_THIS_VOLATILE (tag) = TREE_THIS_VOLATILE (type);
2068
2069 /* Memory tags are by definition addressable. This also prevents
2070 is_gimple_ref frome confusing memory tags with optimizable
2071 variables. */
2072 TREE_ADDRESSABLE (tag) = 1;
2073
2074 ann = get_var_ann (tag);
2075 ann->mem_tag_kind = (is_type_tag) ? TYPE_TAG : NAME_TAG;
2076 ann->type_mem_tag = NULL_TREE;
2077
2078 /* Add the tag to the symbol table. */
2079 add_referenced_tmp_var (tag);
2080
2081 return tag;
2082 }
2083
2084
2085 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2086 This is used if P_i has been found to point to a specific set of
2087 variables or to a non-aliased memory location like the address returned
2088 by malloc functions. */
2089
2090 static tree
2091 get_nmt_for (tree ptr)
2092 {
2093 struct ptr_info_def *pi = get_ptr_info (ptr);
2094 tree tag = pi->name_mem_tag;
2095
2096 if (tag == NULL_TREE)
2097 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2098
2099 /* If PTR is a PARM_DECL, it points to a global variable or malloc,
2100 then its name tag should be considered a global variable. */
2101 if (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
2102 || pi->pt_malloc
2103 || pi->pt_global_mem)
2104 mark_call_clobbered (tag);
2105
2106 return tag;
2107 }
2108
2109
2110 /* Return the type memory tag associated to pointer PTR. A memory tag is an
2111 artificial variable that represents the memory location pointed-to by
2112 PTR. It is used to model the effects of pointer de-references on
2113 addressable variables.
2114
2115 AI points to the data gathered during alias analysis. This function
2116 populates the array AI->POINTERS. */
2117
static tree
get_tmt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      /* Reuse the tag of the first recorded pointer whose pointed-to
	 alias set matches (and, when points-to analysis is enabled,
	 whose points-to set matches as well).  */
      if (tag_set == curr->set
	  && (flag_tree_points_to == PTA_NONE
	      || same_points_to_set (curr->var, ptr)))
	{
	  tag = var_ann (curr->var)->type_mem_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a type tag already, create a new TMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->type_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
      else
	tag = var_ann (ptr)->type_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = xcalloc (1, sizeof (*alias_map));
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

#if defined ENABLE_CHECKING
  /* Make sure that the type tag has the same alias set as the
     pointed-to type.  */
  if (tag_set != get_alias_set (tag))
    abort ();
#endif


  return tag;
}
2179
2180
2181 /* Create GLOBAL_VAR, an artificial global variable to act as a
2182 representative of all the variables that may be clobbered by function
2183 calls. */
2184
2185 static void
2186 create_global_var (void)
2187 {
2188 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2189 size_type_node);
2190 DECL_ARTIFICIAL (global_var) = 1;
2191 TREE_READONLY (global_var) = 0;
2192 DECL_EXTERNAL (global_var) = 1;
2193 TREE_STATIC (global_var) = 1;
2194 TREE_USED (global_var) = 1;
2195 DECL_CONTEXT (global_var) = NULL_TREE;
2196 TREE_THIS_VOLATILE (global_var) = 0;
2197 TREE_ADDRESSABLE (global_var) = 0;
2198
2199 add_referenced_tmp_var (global_var);
2200 bitmap_set_bit (vars_to_rename, var_ann (global_var)->uid);
2201 }
2202
2203
2204 /* Dump alias statistics on FILE. */
2205
/* Print the counters accumulated in the file-scope ALIAS_STATS
   structure, one per line, for the current function.  */
static void
dump_alias_stats (FILE *file)
{
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  fprintf (file, "\nAlias statistics for %s\n\n", funcname);
  fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
  fprintf (file, "Total alias mayalias results:\t%u\n",
	   alias_stats.alias_mayalias);
  fprintf (file, "Total alias noalias results:\t%u\n",
	   alias_stats.alias_noalias);
  fprintf (file, "Total simple queries:\t%u\n",
	   alias_stats.simple_queries);
  fprintf (file, "Total simple resolved:\t%u\n",
	   alias_stats.simple_resolved);
  fprintf (file, "Total TBAA queries:\t%u\n",
	   alias_stats.tbaa_queries);
  fprintf (file, "Total TBAA resolved:\t%u\n",
	   alias_stats.tbaa_resolved);
  fprintf (file, "Total PTA queries:\t%u\n",
	   alias_stats.pta_queries);
  fprintf (file, "Total PTA resolved:\t%u\n",
	   alias_stats.pta_resolved);
}
2230
2231
2232 /* Dump alias information on FILE. */
2233
2234 void
2235 dump_alias_info (FILE *file)
2236 {
2237 size_t i;
2238 const char *funcname
2239 = lang_hooks.decl_printable_name (current_function_decl, 2);
2240
2241 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2242
2243 fprintf (file, "Aliased symbols\n\n");
2244 for (i = 0; i < num_referenced_vars; i++)
2245 {
2246 tree var = referenced_var (i);
2247 if (may_be_aliased (var))
2248 dump_variable (file, var);
2249 }
2250
2251 fprintf (file, "\nDereferenced pointers\n\n");
2252 for (i = 0; i < num_referenced_vars; i++)
2253 {
2254 tree var = referenced_var (i);
2255 var_ann_t ann = var_ann (var);
2256 if (ann->type_mem_tag)
2257 dump_variable (file, var);
2258 }
2259
2260 fprintf (file, "\nType memory tags\n\n");
2261 for (i = 0; i < num_referenced_vars; i++)
2262 {
2263 tree var = referenced_var (i);
2264 var_ann_t ann = var_ann (var);
2265 if (ann->mem_tag_kind == TYPE_TAG)
2266 dump_variable (file, var);
2267 }
2268
2269 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2270
2271 fprintf (file, "SSA_NAME pointers\n\n");
2272 for (i = 1; i < num_ssa_names; i++)
2273 {
2274 tree ptr = ssa_name (i);
2275 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2276 if (!SSA_NAME_IN_FREE_LIST (ptr)
2277 && pi
2278 && pi->name_mem_tag)
2279 dump_points_to_info_for (file, ptr);
2280 }
2281
2282 fprintf (file, "\nName memory tags\n\n");
2283 for (i = 0; i < num_referenced_vars; i++)
2284 {
2285 tree var = referenced_var (i);
2286 var_ann_t ann = var_ann (var);
2287 if (ann->mem_tag_kind == NAME_TAG)
2288 dump_variable (file, var);
2289 }
2290
2291 fprintf (file, "\n");
2292 }
2293
2294
2295 /* Dump alias information on stderr. */
2296
void
debug_alias_info (void)
{
  /* Convenience wrapper for calling dump_alias_info from a debugger.  */
  dump_alias_info (stderr);
}
2302
2303
2304 /* Return the alias information associated with pointer T. It creates a
2305 new instance if none existed. */
2306
2307 static struct ptr_info_def *
2308 get_ptr_info (tree t)
2309 {
2310 struct ptr_info_def *pi;
2311
2312 #if defined ENABLE_CHECKING
2313 if (!POINTER_TYPE_P (TREE_TYPE (t)))
2314 abort ();
2315 #endif
2316
2317 pi = SSA_NAME_PTR_INFO (t);
2318 if (pi == NULL)
2319 {
2320 pi = ggc_alloc (sizeof (*pi));
2321 memset ((void *)pi, 0, sizeof (*pi));
2322 SSA_NAME_PTR_INFO (t) = pi;
2323 }
2324
2325 return pi;
2326 }
2327
2328
2329 /* Dump points-to information for SSA_NAME PTR into FILE. */
2330
2331 void
2332 dump_points_to_info_for (FILE *file, tree ptr)
2333 {
2334 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2335
2336 print_generic_expr (file, ptr, dump_flags);
2337
2338 if (pi)
2339 {
2340 if (pi->name_mem_tag)
2341 {
2342 fprintf (file, ", name memory tag: ");
2343 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2344 }
2345
2346 if (pi->is_dereferenced)
2347 fprintf (file, ", is dereferenced");
2348
2349 if (pi->value_escapes_p)
2350 fprintf (file, ", its value escapes");
2351
2352 if (pi->pt_anything)
2353 fprintf (file, ", points-to anything");
2354
2355 if (pi->pt_malloc)
2356 fprintf (file, ", points-to malloc");
2357
2358 if (pi->pt_vars)
2359 {
2360 unsigned ix;
2361
2362 fprintf (file, ", points-to vars: { ");
2363 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix,
2364 {
2365 print_generic_expr (file, referenced_var (ix), dump_flags);
2366 fprintf (file, " ");
2367 });
2368 fprintf (file, "}");
2369 }
2370 }
2371
2372 fprintf (file, "\n");
2373 }
2374
2375
2376 /* Dump points-to information for VAR into stderr. */
2377
void
debug_points_to_info_for (tree var)
{
  /* Convenience wrapper for calling dump_points_to_info_for from a
     debugger.  */
  dump_points_to_info_for (stderr, var);
}
2383
2384
2385 /* Dump points-to information into FILE. NOTE: This function is slow, as
2386 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2387
void
dump_points_to_info (FILE *file)
{
  basic_block bb;
  block_stmt_iterator si;
  size_t i;
  const char *fname =
    lang_hooks.decl_printable_name (current_function_decl, 2);

  fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);

  /* First dump points-to information for the default definitions of
     pointer variables.  This is necessary because default definitions are
     not part of the code.  */
  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  var_ann_t ann = var_ann (var);
	  if (ann->default_def)
	    dump_points_to_info_for (file, ann->default_def);
	}
    }

  /* Dump points-to information for every pointer defined in the program.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      /* Pointers defined by PHI nodes at the top of the block.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  tree ptr = PHI_RESULT (phi);
	  if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	    dump_points_to_info_for (file, ptr);
	}

      /* Pointers among the DEF operands of each statement.  */
      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	{
	  stmt_ann_t ann = stmt_ann (bsi_stmt (si));
	  def_optype defs = DEF_OPS (ann);
	  if (defs)
	    for (i = 0; i < NUM_DEFS (defs); i++)
	      if (POINTER_TYPE_P (TREE_TYPE (DEF_OP (defs, i))))
		dump_points_to_info_for (file, DEF_OP (defs, i));
	}
    }

  fprintf (file, "\n");
}
2438
2439
/* Dump points-to information for the current function into stderr.  */
2441
void
debug_points_to_info (void)
{
  /* Convenience wrapper for calling dump_points_to_info from a
     debugger.  */
  dump_points_to_info (stderr);
}
2447
2448 /* Dump to FILE the list of variables that may be aliasing VAR. */
2449
2450 void
2451 dump_may_aliases_for (FILE *file, tree var)
2452 {
2453 varray_type aliases;
2454
2455 if (TREE_CODE (var) == SSA_NAME)
2456 var = SSA_NAME_VAR (var);
2457
2458 aliases = var_ann (var)->may_aliases;
2459 if (aliases)
2460 {
2461 size_t i;
2462 fprintf (file, "{ ");
2463 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2464 {
2465 print_generic_expr (file, VARRAY_TREE (aliases, i), dump_flags);
2466 fprintf (file, " ");
2467 }
2468 fprintf (file, "}");
2469 }
2470 }
2471
2472
2473 /* Dump to stderr the list of variables that may be aliasing VAR. */
2474
void
debug_may_aliases_for (tree var)
{
  /* Convenience wrapper for calling dump_may_aliases_for from a
     debugger.  */
  dump_may_aliases_for (stderr, var);
}
2480
2481 /* Return true if VAR may be aliased. */
2482
2483 bool
2484 may_be_aliased (tree var)
2485 {
2486 /* Obviously. */
2487 if (TREE_ADDRESSABLE (var))
2488 return true;
2489
2490 /* Automatic variables can't have their addresses escape any other way. */
2491 if (!TREE_STATIC (var))
2492 return false;
2493
2494 /* Globally visible variables can have their addresses taken by other
2495 translation units. */
2496 if (DECL_EXTERNAL (var) || TREE_PUBLIC (var))
2497 return true;
2498
2499 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2500 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2501 we can only be sure the variable isn't addressable if it's local to the
2502 current function. */
2503 if (flag_unit_at_a_time)
2504 return false;
2505 if (decl_function_context (var) == current_function_decl)
2506 return false;
2507
2508 return true;
2509 }
2510