/* gcc/tree-ssa-structalias.c — tree based points-to analysis (GCC source). */
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "sbitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "gimple-ssa.h"
34 #include "cgraph.h"
35 #include "tree-ssanames.h"
36 #include "tree-into-ssa.h"
37 #include "tree-dfa.h"
38 #include "tree-inline.h"
39 #include "diagnostic-core.h"
40 #include "hash-table.h"
41 #include "function.h"
42 #include "tree-pass.h"
43 #include "alloc-pool.h"
44 #include "splay-tree.h"
45 #include "params.h"
46 #include "alias.h"
47 #include "pointer-set.h"
48
49 /* The idea behind this analyzer is to generate set constraints from the
50 program, then solve the resulting constraints in order to generate the
51 points-to sets.
52
53 Set constraints are a way of modeling program analysis problems that
54 involve sets. They consist of an inclusion constraint language,
55 describing the variables (each variable is a set) and operations that
56 are involved on the variables, and a set of rules that derive facts
57 from these operations. To solve a system of set constraints, you derive
58 all possible facts under the rules, which gives you the correct sets
59 as a consequence.
60
61 See "Efficient Field-sensitive pointer analysis for C" by "David
62 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
63 http://citeseer.ist.psu.edu/pearce04efficient.html
64
65 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
66 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
67 http://citeseer.ist.psu.edu/heintze01ultrafast.html
68
69 There are three types of real constraint expressions, DEREF,
70 ADDRESSOF, and SCALAR. Each constraint expression consists
71 of a constraint type, a variable, and an offset.
72
73 SCALAR is a constraint expression type used to represent x, whether
74 it appears on the LHS or the RHS of a statement.
75 DEREF is a constraint expression type used to represent *x, whether
76 it appears on the LHS or the RHS of a statement.
77 ADDRESSOF is a constraint expression used to represent &x, whether
78 it appears on the LHS or the RHS of a statement.
79
80 Each pointer variable in the program is assigned an integer id, and
81 each field of a structure variable is assigned an integer id as well.
82
83 Structure variables are linked to their list of fields through a "next
84 field" in each variable that points to the next field in offset
85 order.
86 Each variable for a structure field has
87
88 1. "size", that tells the size in bits of that field.
89 2. "fullsize, that tells the size in bits of the entire structure.
90 3. "offset", that tells the offset in bits from the beginning of the
91 structure to this field.
92
93 Thus,
94 struct f
95 {
96 int a;
97 int b;
98 } foo;
99 int *bar;
100
101 looks like
102
103 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
104 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
105 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106
107
108 In order to solve the system of set constraints, the following is
109 done:
110
111 1. Each constraint variable x has a solution set associated with it,
112 Sol(x).
113
114 2. Constraints are separated into direct, copy, and complex.
115 Direct constraints are ADDRESSOF constraints that require no extra
116 processing, such as P = &Q
117 Copy constraints are those of the form P = Q.
118 Complex constraints are all the constraints involving dereferences
119 and offsets (including offsetted copies).
120
121 3. All direct constraints of the form P = &Q are processed, such
122 that Q is added to Sol(P)
123
124 4. All complex constraints for a given constraint variable are stored in a
125 linked list attached to that variable's node.
126
127 5. A directed graph is built out of the copy constraints. Each
128 constraint variable is a node in the graph, and an edge from
129 Q to P is added for each copy constraint of the form P = Q
130
131 6. The graph is then walked, and solution sets are
132 propagated along the copy edges, such that an edge from Q to P
133 causes Sol(P) <- Sol(P) union Sol(Q).
134
135 7. As we visit each node, all complex constraints associated with
136 that node are processed by adding appropriate copy edges to the graph, or the
137 appropriate variables to the solution set.
138
139 8. The process of walking the graph is iterated until no solution
140 sets change.
141
142 Prior to walking the graph in steps 6 and 7, We perform static
143 cycle elimination on the constraint graph, as well
144 as off-line variable substitution.
145
146 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
147 on and turned into anything), but isn't. You can just see what offset
148 inside the pointed-to struct it's going to access.
149
150 TODO: Constant bounded arrays can be handled as if they were structs of the
151 same number of elements.
152
153 TODO: Modeling heap and incoming pointers becomes much better if we
154 add fields to them as we discover them, which we could do.
155
156 TODO: We could handle unions, but to be honest, it's probably not
157 worth the pain or slowdown. */
158
159 /* IPA-PTA optimizations possible.
160
161 When the indirect function called is ANYTHING we can add disambiguation
162 based on the function signatures (or simply the parameter count which
163 is the varinfo size). We also do not need to consider functions that
164 do not have their address taken.
165
166 The is_global_var bit which marks escape points is overly conservative
167 in IPA mode. Split it to is_escape_point and is_global_var - only
168 externally visible globals are escape points in IPA mode. This is
169 also needed to fix the pt_solution_includes_global predicate
170 (and thus ptr_deref_may_alias_global_p).
171
172 The way we introduce DECL_PT_UID to avoid fixing up all points-to
173 sets in the translation unit when we copy a DECL during inlining
174 pessimizes precision. The advantage is that the DECL_PT_UID keeps
175 compile-time and memory usage overhead low - the points-to sets
176 do not grow or get unshared as they would during a fixup phase.
177 An alternative solution is to delay IPA PTA until after all
178 inlining transformations have been applied.
179
180 The way we propagate clobber/use information isn't optimized.
181 It should use a new complex constraint that properly filters
182 out local variables of the callee (though that would make
183 the sets invalid after inlining). OTOH we might as well
184 admit defeat to WHOPR and simply do all the clobber/use analysis
185 and propagation after PTA finished but before we threw away
186 points-to information for memory variables. WHOPR and PTA
187 do not play along well anyway - the whole constraint solving
188 would need to be done in WPA phase and it will be very interesting
189 to apply the results to local SSA names during LTRANS phase.
190
191 We probably should compute a per-function unit-ESCAPE solution
192 propagating it simply like the clobber / uses solutions. The
193 solution can go alongside the non-IPA espaced solution and be
194 used to query which vars escape the unit through a function.
195
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
198
199 And probably more. */
200
201 static bool use_field_sensitive = true;
202 static int in_ipa_mode = 0;
203
204 /* Used for predecessor bitmaps. */
205 static bitmap_obstack predbitmap_obstack;
206
207 /* Used for points-to sets. */
208 static bitmap_obstack pta_obstack;
209
210 /* Used for oldsolution members of variables. */
211 static bitmap_obstack oldpta_obstack;
212
213 /* Used for per-solver-iteration bitmaps. */
214 static bitmap_obstack iteration_obstack;
215
216 static unsigned int create_variable_info_for (tree, const char *);
217 typedef struct constraint_graph *constraint_graph_t;
218 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
219
220 struct constraint;
221 typedef struct constraint *constraint_t;
222
223
224 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
225 if (a) \
226 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
227
228 static struct constraint_stats
229 {
230 unsigned int total_vars;
231 unsigned int nonpointer_vars;
232 unsigned int unified_vars_static;
233 unsigned int unified_vars_dynamic;
234 unsigned int iterations;
235 unsigned int num_edges;
236 unsigned int num_implicit_edges;
237 unsigned int points_to_sets_created;
238 } stats;
239
240 struct variable_info
241 {
242 /* ID of this variable */
243 unsigned int id;
244
245 /* True if this is a variable created by the constraint analysis, such as
246 heap variables and constraints we had to break up. */
247 unsigned int is_artificial_var : 1;
248
249 /* True if this is a special variable whose solution set should not be
250 changed. */
251 unsigned int is_special_var : 1;
252
253 /* True for variables whose size is not known or variable. */
254 unsigned int is_unknown_size_var : 1;
255
256 /* True for (sub-)fields that represent a whole variable. */
257 unsigned int is_full_var : 1;
258
259 /* True if this is a heap variable. */
260 unsigned int is_heap_var : 1;
261
262 /* True if this field may contain pointers. */
263 unsigned int may_have_pointers : 1;
264
265 /* True if this field has only restrict qualified pointers. */
266 unsigned int only_restrict_pointers : 1;
267
268 /* True if this represents a global variable. */
269 unsigned int is_global_var : 1;
270
271 /* True if this represents a IPA function info. */
272 unsigned int is_fn_info : 1;
273
274 /* The ID of the variable for the next field in this structure
275 or zero for the last field in this structure. */
276 unsigned next;
277
278 /* The ID of the variable for the first field in this structure. */
279 unsigned head;
280
281 /* Offset of this variable, in bits, from the base variable */
282 unsigned HOST_WIDE_INT offset;
283
284 /* Size of the variable, in bits. */
285 unsigned HOST_WIDE_INT size;
286
287 /* Full size of the base variable, in bits. */
288 unsigned HOST_WIDE_INT fullsize;
289
290 /* Name of this variable */
291 const char *name;
292
293 /* Tree that this variable is associated with. */
294 tree decl;
295
296 /* Points-to set for this variable. */
297 bitmap solution;
298
299 /* Old points-to set for this variable. */
300 bitmap oldsolution;
301 };
302 typedef struct variable_info *varinfo_t;
303
304 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
305 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
306 unsigned HOST_WIDE_INT);
307 static varinfo_t lookup_vi_for_tree (tree);
308 static inline bool type_can_have_subvars (const_tree);
309
310 /* Pool of variable info structures. */
311 static alloc_pool variable_info_pool;
312
313 /* Map varinfo to final pt_solution. */
314 static pointer_map_t *final_solutions;
315 struct obstack final_solutions_obstack;
316
317 /* Table of variable info structures for constraint variables.
318 Indexed directly by variable info id. */
319 static vec<varinfo_t> varmap;
320
/* Return the variable info structure for constraint variable id N.
   Indexes the global VARMAP directly; N must be a valid id (ids are
   assigned densely by new_var_info).  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
328
/* Return the next variable in the chain of sub-variables of VI, or
   NULL if VI is the last sub-variable.  Id zero is the chain
   terminator (see the comment on the special variable ids).  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}
337
338 /* Static IDs for the special variables. Variable ID zero is unused
339 and used as terminator for the sub-variable chain. */
340 enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
341 escaped_id = 4, nonlocal_id = 5,
342 storedanything_id = 6, integer_id = 7 };
343
344 /* Return a new variable info structure consisting for a variable
345 named NAME, and using constraint graph node NODE. Append it
346 to the vector of variable info structures. */
347
348 static varinfo_t
349 new_var_info (tree t, const char *name)
350 {
351 unsigned index = varmap.length ();
352 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
353
354 ret->id = index;
355 ret->name = name;
356 ret->decl = t;
357 /* Vars without decl are artificial and do not have sub-variables. */
358 ret->is_artificial_var = (t == NULL_TREE);
359 ret->is_special_var = false;
360 ret->is_unknown_size_var = false;
361 ret->is_full_var = (t == NULL_TREE);
362 ret->is_heap_var = false;
363 ret->may_have_pointers = true;
364 ret->only_restrict_pointers = false;
365 ret->is_global_var = (t == NULL_TREE);
366 ret->is_fn_info = false;
367 if (t && DECL_P (t))
368 ret->is_global_var = (is_global_var (t)
369 /* We have to treat even local register variables
370 as escape points. */
371 || (TREE_CODE (t) == VAR_DECL
372 && DECL_HARD_REGISTER (t)));
373 ret->solution = BITMAP_ALLOC (&pta_obstack);
374 ret->oldsolution = NULL;
375 ret->next = 0;
376 ret->head = ret->id;
377
378 stats.total_vars++;
379
380 varmap.safe_push (ret);
381
382 return ret;
383 }
384
385
386 /* A map mapping call statements to per-stmt variables for uses
387 and clobbers specific to the call. */
388 static struct pointer_map_t *call_stmt_vars;
389
390 /* Lookup or create the variable for the call statement CALL. */
391
392 static varinfo_t
393 get_call_vi (gimple call)
394 {
395 void **slot_p;
396 varinfo_t vi, vi2;
397
398 slot_p = pointer_map_insert (call_stmt_vars, call);
399 if (*slot_p)
400 return (varinfo_t) *slot_p;
401
402 vi = new_var_info (NULL_TREE, "CALLUSED");
403 vi->offset = 0;
404 vi->size = 1;
405 vi->fullsize = 2;
406 vi->is_full_var = true;
407
408 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
409 vi2->offset = 1;
410 vi2->size = 1;
411 vi2->fullsize = 2;
412 vi2->is_full_var = true;
413
414 vi->next = vi2->id;
415
416 *slot_p = (void *) vi;
417 return vi;
418 }
419
420 /* Lookup the variable for the call statement CALL representing
421 the uses. Returns NULL if there is nothing special about this call. */
422
423 static varinfo_t
424 lookup_call_use_vi (gimple call)
425 {
426 void **slot_p;
427
428 slot_p = pointer_map_contains (call_stmt_vars, call);
429 if (slot_p)
430 return (varinfo_t) *slot_p;
431
432 return NULL;
433 }
434
435 /* Lookup the variable for the call statement CALL representing
436 the clobbers. Returns NULL if there is nothing special about this call. */
437
438 static varinfo_t
439 lookup_call_clobber_vi (gimple call)
440 {
441 varinfo_t uses = lookup_call_use_vi (call);
442 if (!uses)
443 return NULL;
444
445 return vi_next (uses);
446 }
447
/* Lookup or create the variable for the call statement CALL representing
   the uses.  Never returns NULL; get_call_vi creates the use/clobber
   pair on demand.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  return get_call_vi (call);
}
456
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  The clobber variable is the chain successor of the
   use variable created by get_call_vi.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  return vi_next (get_call_vi (call));
}
465
466
467 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
468
469 /* An expression that appears in a constraint. */
470
471 struct constraint_expr
472 {
473 /* Constraint type. */
474 constraint_expr_type type;
475
476 /* Variable we are referring to in the constraint. */
477 unsigned int var;
478
479 /* Offset, in bits, of this constraint from the beginning of
480 variables it ends up referring to.
481
482 IOW, in a deref constraint, we would deref, get the result set,
483 then add OFFSET to each member. */
484 HOST_WIDE_INT offset;
485 };
486
487 /* Use 0x8000... as special unknown offset. */
488 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
489
490 typedef struct constraint_expr ce_s;
491 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
492 static void get_constraint_for (tree, vec<ce_s> *);
493 static void get_constraint_for_rhs (tree, vec<ce_s> *);
494 static void do_deref (vec<ce_s> *);
495
496 /* Our set constraints are made up of two constraint expressions, one
497 LHS, and one RHS.
498
499 As described in the introduction, our set constraints each represent an
500 operation between set valued variables.
501 */
502 struct constraint
503 {
504 struct constraint_expr lhs;
505 struct constraint_expr rhs;
506 };
507
508 /* List of constraints that we use to build the constraint graph from. */
509
510 static vec<constraint_t> constraints;
511 static alloc_pool constraint_pool;
512
513 /* The constraint graph is represented as an array of bitmaps
514 containing successor nodes. */
515
516 struct constraint_graph
517 {
518 /* Size of this graph, which may be different than the number of
519 nodes in the variable map. */
520 unsigned int size;
521
522 /* Explicit successors of each node. */
523 bitmap *succs;
524
525 /* Implicit predecessors of each node (Used for variable
526 substitution). */
527 bitmap *implicit_preds;
528
529 /* Explicit predecessors of each node (Used for variable substitution). */
530 bitmap *preds;
531
532 /* Indirect cycle representatives, or -1 if the node has no indirect
533 cycles. */
534 int *indirect_cycles;
535
536 /* Representative node for a node. rep[a] == a unless the node has
537 been unified. */
538 unsigned int *rep;
539
540 /* Equivalence class representative for a label. This is used for
541 variable substitution. */
542 int *eq_rep;
543
544 /* Pointer equivalence label for a node. All nodes with the same
545 pointer equivalence label can be unified together at some point
546 (either during constraint optimization or after the constraint
547 graph is built). */
548 unsigned int *pe;
549
550 /* Pointer equivalence representative for a label. This is used to
551 handle nodes that are pointer equivalent but not location
552 equivalent. We can unite these once the addressof constraints
553 are transformed into initial points-to sets. */
554 int *pe_rep;
555
556 /* Pointer equivalence label for each node, used during variable
557 substitution. */
558 unsigned int *pointer_label;
559
560 /* Location equivalence label for each node, used during location
561 equivalence finding. */
562 unsigned int *loc_label;
563
564 /* Pointed-by set for each node, used during location equivalence
565 finding. This is pointed-by rather than pointed-to, because it
566 is constructed using the predecessor graph. */
567 bitmap *pointed_by;
568
569 /* Points to sets for pointer equivalence. This is *not* the actual
570 points-to sets for nodes. */
571 bitmap *points_to;
572
573 /* Bitmap of nodes where the bit is set if the node is a direct
574 node. Used for variable substitution. */
575 sbitmap direct_nodes;
576
577 /* Bitmap of nodes where the bit is set if the node is address
578 taken. Used for variable substitution. */
579 bitmap address_taken;
580
581 /* Vector of complex constraints for each graph node. Complex
582 constraints are those involving dereferences or offsets that are
583 not 0. */
584 vec<constraint_t> *complex;
585 };
586
587 static constraint_graph_t graph;
588
589 /* During variable substitution and the offline version of indirect
590 cycle finding, we create nodes to represent dereferences and
591 address taken constraints. These represent where these start and
592 end. */
593 #define FIRST_REF_NODE (varmap).length ()
594 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
595
596 /* Return the representative node for NODE, if NODE has been unioned
597 with another NODE.
598 This function performs path compression along the way to finding
599 the representative. */
600
601 static unsigned int
602 find (unsigned int node)
603 {
604 gcc_checking_assert (node < graph->size);
605 if (graph->rep[node] != node)
606 return graph->rep[node] = find (graph->rep[node]);
607 return node;
608 }
609
610 /* Union the TO and FROM nodes to the TO nodes.
611 Note that at some point in the future, we may want to do
612 union-by-rank, in which case we are going to have to return the
613 node we unified to. */
614
615 static bool
616 unite (unsigned int to, unsigned int from)
617 {
618 gcc_checking_assert (to < graph->size && from < graph->size);
619 if (to != from && graph->rep[from] != to)
620 {
621 graph->rep[from] = to;
622 return true;
623 }
624 return false;
625 }
626
627 /* Create a new constraint consisting of LHS and RHS expressions. */
628
629 static constraint_t
630 new_constraint (const struct constraint_expr lhs,
631 const struct constraint_expr rhs)
632 {
633 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
634 ret->lhs = lhs;
635 ret->rhs = rhs;
636 return ret;
637 }
638
639 /* Print out constraint C to FILE. */
640
641 static void
642 dump_constraint (FILE *file, constraint_t c)
643 {
644 if (c->lhs.type == ADDRESSOF)
645 fprintf (file, "&");
646 else if (c->lhs.type == DEREF)
647 fprintf (file, "*");
648 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
649 if (c->lhs.offset == UNKNOWN_OFFSET)
650 fprintf (file, " + UNKNOWN");
651 else if (c->lhs.offset != 0)
652 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
653 fprintf (file, " = ");
654 if (c->rhs.type == ADDRESSOF)
655 fprintf (file, "&");
656 else if (c->rhs.type == DEREF)
657 fprintf (file, "*");
658 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
659 if (c->rhs.offset == UNKNOWN_OFFSET)
660 fprintf (file, " + UNKNOWN");
661 else if (c->rhs.offset != 0)
662 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
663 }
664
665
666 void debug_constraint (constraint_t);
667 void debug_constraints (void);
668 void debug_constraint_graph (void);
669 void debug_solution_for_var (unsigned int);
670 void debug_sa_points_to_info (void);
671
/* Print out constraint C to stderr, followed by a newline.
   Intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}
680
681 /* Print out all constraints to FILE */
682
683 static void
684 dump_constraints (FILE *file, int from)
685 {
686 int i;
687 constraint_t c;
688 for (i = from; constraints.iterate (i, &c); i++)
689 if (c)
690 {
691 dump_constraint (file, c);
692 fprintf (file, "\n");
693 }
694 }
695
/* Print out all constraints to stderr.  Intended to be called from
   the debugger.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
703
/* Print the constraint graph to FILE in graphviz dot format.
   Each unified node is printed once (non-representatives are
   skipped), labeled with its complex constraints, followed by the
   successor edges.  Nodes at index >= FIRST_REF_NODE are the
   dereference ("*name") nodes used by the offline phases.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      /* Node FIRST_REF_NODE itself is the boundary marker, not a
	 variable; skip it.  */
      if (i == FIRST_REF_NODE)
	continue;
      /* Only representatives are printed.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* \N expands to the node name in dot; \l left-justifies
	     each constraint on its own label line.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges after unification are uninteresting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
778
/* Print out the constraint graph in dot format to stderr.  Intended
   to be called from the debugger.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
786
787 /* SOLVER FUNCTIONS
788
789 The solver is a simple worklist solver, that works on the following
790 algorithm:
791
792 sbitmap changed_nodes = all zeroes;
793 changed_count = 0;
794 For each node that is not already collapsed:
795 changed_count++;
796 set bit in changed nodes
797
798 while (changed_count > 0)
799 {
800 compute topological ordering for constraint graph
801
802 find and collapse cycles in the constraint graph (updating
803 changed if necessary)
804
805 for each node (n) in the graph in topological order:
806 changed_count--;
807
808 Process each complex constraint associated with the node,
809 updating changed if necessary.
810
811 For each outgoing edge from n, propagate the solution from n to
812 the destination of the edge, updating changed as necessary.
813
814 } */
815
816 /* Return true if two constraint expressions A and B are equal. */
817
818 static bool
819 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
820 {
821 return a.type == b.type && a.var == b.var && a.offset == b.offset;
822 }
823
824 /* Return true if constraint expression A is less than constraint expression
825 B. This is just arbitrary, but consistent, in order to give them an
826 ordering. */
827
828 static bool
829 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
830 {
831 if (a.type == b.type)
832 {
833 if (a.var == b.var)
834 return a.offset < b.offset;
835 else
836 return a.var < b.var;
837 }
838 else
839 return a.type < b.type;
840 }
841
842 /* Return true if constraint A is less than constraint B. This is just
843 arbitrary, but consistent, in order to give them an ordering. */
844
845 static bool
846 constraint_less (const constraint_t &a, const constraint_t &b)
847 {
848 if (constraint_expr_less (a->lhs, b->lhs))
849 return true;
850 else if (constraint_expr_less (b->lhs, a->lhs))
851 return false;
852 else
853 return constraint_expr_less (a->rhs, b->rhs);
854 }
855
856 /* Return true if two constraints A and B are equal. */
857
858 static bool
859 constraint_equal (struct constraint a, struct constraint b)
860 {
861 return constraint_expr_equal (a.lhs, b.lhs)
862 && constraint_expr_equal (a.rhs, b.rhs);
863 }
864
865
866 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
867
868 static constraint_t
869 constraint_vec_find (vec<constraint_t> vec,
870 struct constraint lookfor)
871 {
872 unsigned int place;
873 constraint_t found;
874
875 if (!vec.exists ())
876 return NULL;
877
878 place = vec.lower_bound (&lookfor, constraint_less);
879 if (place >= vec.length ())
880 return NULL;
881 found = vec[place];
882 if (!constraint_equal (*found, lookfor))
883 return NULL;
884 return found;
885 }
886
887 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
888
889 static void
890 constraint_set_union (vec<constraint_t> *to,
891 vec<constraint_t> *from)
892 {
893 int i;
894 constraint_t c;
895
896 FOR_EACH_VEC_ELT (*from, i, c)
897 {
898 if (constraint_vec_find (*to, *c) == NULL)
899 {
900 unsigned int place = to->lower_bound (c, constraint_less);
901 to->safe_insert (place, c);
902 }
903 }
904 }
905
/* Expand the solution in SET so that for every variable with
   sub-fields present, all of its sub-fields are included as well.
   SET is modified in place.  */

static void
solution_set_expand (bitmap set)
{
  bitmap_iterator bi;
  unsigned j;

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.
     Note that bits are set in SET while it is being iterated; the
     head id of a variable precedes its field ids, so this only sets
     bits at or before the iteration point.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (set, v->head);
    }

  /* In the second pass now expand all head variables with subfields:
     walk each head variable's sub-variable chain and add every field.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var
	  || v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (set, v->id);
    }
}
937
/* Union solution sets TO and FROM, adding INC (an offset in bits) to
   each member of FROM in the process.  Returns true if TO changed.  */

static bool
set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of FROM contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (from, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* For zero offset simply union the solution into the destination.  */
  if (inc == 0)
    return bitmap_ior_into (to, from);

  /* If the offset is unknown we have to expand the solution to
     all subfields, since the pointer may end up anywhere within
     the pointed-to variables.  */
  if (inc == UNKNOWN_OFFSET)
    {
      bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_expand (tmp);
      changed |= bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  /* The addition may wrap for negative INC; the check below
	     relies on unsigned wraparound to detect pointing before
	     the start of the variable.  */
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (inc < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  changed |= bitmap_set_bit (to, vi->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (vi->offset != fieldoffset
	      && vi->next != 0)
	    changed |= bitmap_set_bit (to, vi->next);
	}
    }

  return changed;
}
1004
1005 /* Insert constraint C into the list of complex constraints for graph
1006 node VAR. */
1007
1008 static void
1009 insert_into_complex (constraint_graph_t graph,
1010 unsigned int var, constraint_t c)
1011 {
1012 vec<constraint_t> complex = graph->complex[var];
1013 unsigned int place = complex.lower_bound (c, constraint_less);
1014
1015 /* Only insert constraints that do not already exist. */
1016 if (place >= complex.length ()
1017 || !constraint_equal (*c, *complex[place]))
1018 graph->complex[var].safe_insert (place, c);
1019 }
1020
1021
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  FROM must already have TO as
   its representative in the union-find structure.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Merge the (sorted) constraint vectors and release FROM's copy;
     constraint_set_union deduplicates via insert_into_complex.  */
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  graph->complex[from].release ();
}
1051
1052
1053 /* Remove edges involving NODE from GRAPH. */
1054
1055 static void
1056 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1057 {
1058 if (graph->succs[node])
1059 BITMAP_FREE (graph->succs[node]);
1060 }
1061
1062 /* Merge GRAPH nodes FROM and TO into node TO. */
1063
1064 static void
1065 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1066 unsigned int from)
1067 {
1068 if (graph->indirect_cycles[from] != -1)
1069 {
1070 /* If we have indirect cycles with the from node, and we have
1071 none on the to node, the to node has indirect cycles from the
1072 from node now that they are unified.
1073 If indirect cycles exist on both, unify the nodes that they
1074 are in a cycle with, since we know they are in a cycle with
1075 each other. */
1076 if (graph->indirect_cycles[to] == -1)
1077 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1078 }
1079
1080 /* Merge all the successor edges. */
1081 if (graph->succs[from])
1082 {
1083 if (!graph->succs[to])
1084 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1085 bitmap_ior_into (graph->succs[to],
1086 graph->succs[from]);
1087 }
1088
1089 clear_edges_for_node (graph, from);
1090 }
1091
1092
1093 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1094 it doesn't exist in the graph already. */
1095
1096 static void
1097 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1098 unsigned int from)
1099 {
1100 if (to == from)
1101 return;
1102
1103 if (!graph->implicit_preds[to])
1104 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1105
1106 if (bitmap_set_bit (graph->implicit_preds[to], from))
1107 stats.num_implicit_edges++;
1108 }
1109
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Unlike add_graph_edge no
   value is returned and no statistics are updated.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1122
1123 /* Add a graph edge to GRAPH, going from FROM to TO if
1124 it doesn't exist in the graph already.
1125 Return false if the edge already existed, true otherwise. */
1126
1127 static bool
1128 add_graph_edge (constraint_graph_t graph, unsigned int to,
1129 unsigned int from)
1130 {
1131 if (to == from)
1132 {
1133 return false;
1134 }
1135 else
1136 {
1137 bool r = false;
1138
1139 if (!graph->succs[from])
1140 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1141 if (bitmap_set_bit (graph->succs[from], to))
1142 {
1143 r = true;
1144 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1145 stats.num_edges++;
1146 }
1147 return r;
1148 }
1149 }
1150
1151
1152 /* Initialize the constraint graph structure to contain SIZE nodes. */
1153
1154 static void
1155 init_graph (unsigned int size)
1156 {
1157 unsigned int j;
1158
1159 graph = XCNEW (struct constraint_graph);
1160 graph->size = size;
1161 graph->succs = XCNEWVEC (bitmap, graph->size);
1162 graph->indirect_cycles = XNEWVEC (int, graph->size);
1163 graph->rep = XNEWVEC (unsigned int, graph->size);
1164 /* ??? Macros do not support template types with multiple arguments,
1165 so we use a typedef to work around it. */
1166 typedef vec<constraint_t> vec_constraint_t_heap;
1167 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1168 graph->pe = XCNEWVEC (unsigned int, graph->size);
1169 graph->pe_rep = XNEWVEC (int, graph->size);
1170
1171 for (j = 0; j < graph->size; j++)
1172 {
1173 graph->rep[j] = j;
1174 graph->pe_rep[j] = -1;
1175 graph->indirect_cycles[j] = -1;
1176 }
1177 }
1178
/* Build the constraint graph, adding only predecessor edges right now.
   This is the input to the offline variable-substitution phases
   (condense_visit / label_visit); the online solver uses the
   successor graph built later by build_succ_graph.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the per-node arrays used by the offline phases.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes; constraints
     below demote them as needed.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* An offsetted or non-scalar load makes x indirect.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* For a sub-field whose address is taken, demote every
		 field of the containing variable, starting at the
		 head of the field chain.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1281
/* Build the constraint graph, adding successor edges.  Run after the
   offline phases: variables are looked up through find () so edges go
   between representatives.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by the offline phases.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node of y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1341
1342
1343 /* Changed variables on the last iteration. */
1344 static bitmap changed;
1345
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already reached by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  sbitmap deleted;
  /* Per-node DFS number.  */
  unsigned int *dfs;
  /* Representative mapping used by the offline phases
     (condense_visit / label_visit).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of open nodes, per Nuutila's variant of Tarjan's
     algorithm (see comment before scc_visit).  */
  vec<unsigned> scc_stack;
};
1357
1358
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      /* Nodes in already-finished SCCs cannot affect our lowlink.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the smallest reachable DFS number up to N
	 (the "lowlink" update).  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root
     iff its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every stacked node belonging to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* The smallest member becomes the representative; it must
	     be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle for later online
		     collapsing rather than being unified now.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* Non-root nodes stay on the stack until their root is found.  */
    si->scc_stack.safe_push (n);
}
1446
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  Merges edges, complex
   constraints and solution sets, and frees FROM's solution.  TO must
   already be FROM's representative.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's sets are dead after unification.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's old solution is stale once TO's solution grew; only
	 relevant when the solver has started iterating.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Drop the self edge a merged cycle may have produced.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1498
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  Filled by topo_visit.  */
  vec<unsigned> topo_order;
};
1509
1510
1511 /* Initialize and return a topological info structure. */
1512
1513 static struct topo_info *
1514 init_topo_info (void)
1515 {
1516 size_t size = graph->size;
1517 struct topo_info *ti = XNEW (struct topo_info);
1518 ti->visited = sbitmap_alloc (size);
1519 bitmap_clear (ti->visited);
1520 ti->topo_order.create (1);
1521 return ti;
1522 }
1523
1524
1525 /* Free the topological sort info pointed to by TI. */
1526
1527 static void
1528 free_topo_info (struct topo_info *ti)
1529 {
1530 sbitmap_free (ti->visited);
1531 ti->topo_order.release ();
1532 free (ti);
1533 }
1534
1535 /* Visit the graph in topological order, and store the order in the
1536 topo_info structure. */
1537
1538 static void
1539 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1540 unsigned int n)
1541 {
1542 bitmap_iterator bi;
1543 unsigned int j;
1544
1545 bitmap_set_bit (ti->visited, n);
1546
1547 if (graph->succs[n])
1548 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1549 {
1550 if (!bitmap_bit_p (ti->visited, j))
1551 topo_visit (graph, ti, j);
1552 }
1553
1554 ti->topo_order.safe_push (n);
1555 }
1556
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  Unions the solutions of everything DELTA
   may point to (at the given offset) into Sol(x), adding copy edges
   so future growth propagates.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the field chain starting at the field covering
	 FIELDOFFSET.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    /* Only union the solution when the edge is new; otherwise
	       it was already propagated.  */
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1644
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  Unions Sol(y) into the solution of
   everything DELTA may point to (at the given offset), adding copy
   edges so future growth propagates.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j)  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      /* Walk the field chain starting at the field covering
	 FIELDOFFSET.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      /* Solutions of special vars never change; stop here.  */
	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1742
1743 /* Handle a non-simple (simple meaning requires no iteration),
1744 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1745
1746 static void
1747 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1748 {
1749 if (c->lhs.type == DEREF)
1750 {
1751 if (c->rhs.type == ADDRESSOF)
1752 {
1753 gcc_unreachable ();
1754 }
1755 else
1756 {
1757 /* *x = y */
1758 do_ds_constraint (c, delta);
1759 }
1760 }
1761 else if (c->rhs.type == DEREF)
1762 {
1763 /* x = *y */
1764 if (!(get_varinfo (c->lhs.var)->is_special_var))
1765 do_sd_constraint (graph, c, delta);
1766 }
1767 else
1768 {
1769 bitmap tmp;
1770 bitmap solution;
1771 bool flag = false;
1772
1773 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1774 solution = get_varinfo (c->rhs.var)->solution;
1775 tmp = get_varinfo (c->lhs.var)->solution;
1776
1777 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1778
1779 if (flag)
1780 bitmap_set_bit (changed, c->lhs.var);
1781 }
1782 }
1783
1784 /* Initialize and return a new SCC info structure. */
1785
1786 static struct scc_info *
1787 init_scc_info (size_t size)
1788 {
1789 struct scc_info *si = XNEW (struct scc_info);
1790 size_t i;
1791
1792 si->current_index = 0;
1793 si->visited = sbitmap_alloc (size);
1794 bitmap_clear (si->visited);
1795 si->deleted = sbitmap_alloc (size);
1796 bitmap_clear (si->deleted);
1797 si->node_mapping = XNEWVEC (unsigned int, size);
1798 si->dfs = XCNEWVEC (unsigned int, size);
1799
1800 for (i = 0; i < size; i++)
1801 si->node_mapping[i] = i;
1802
1803 si->scc_stack.create (1);
1804 return si;
1805 }
1806
1807 /* Free an SCC info structure pointed to by SI */
1808
1809 static void
1810 free_scc_info (struct scc_info *si)
1811 {
1812 sbitmap_free (si->visited);
1813 sbitmap_free (si->deleted);
1814 free (si->node_mapping);
1815 free (si->dfs);
1816 si->scc_stack.release ();
1817 free (si);
1818 }
1819
1820
1821 /* Find indirect cycles in GRAPH that occur, using strongly connected
1822 components, and note them in the indirect cycles map.
1823
1824 This technique comes from Ben Hardekopf and Calvin Lin,
1825 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1826 Lines of Code", submitted to PLDI 2007. */
1827
1828 static void
1829 find_indirect_cycles (constraint_graph_t graph)
1830 {
1831 unsigned int i;
1832 unsigned int size = graph->size;
1833 struct scc_info *si = init_scc_info (size);
1834
1835 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1836 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1837 scc_visit (graph, si, i);
1838
1839 free_scc_info (si);
1840 }
1841
1842 /* Compute a topological ordering for GRAPH, and store the result in the
1843 topo_info structure TI. */
1844
1845 static void
1846 compute_topo_order (constraint_graph_t graph,
1847 struct topo_info *ti)
1848 {
1849 unsigned int i;
1850 unsigned int size = graph->size;
1851
1852 for (i = 0; i != size; ++i)
1853 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1854 topo_visit (graph, ti, i);
1855 }
1856
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Precomputed hash of LABELS.  */
  hashval_t hashcode;
  /* The equivalence class assigned to LABELS, or 0 if none has been
     assigned yet.  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1867
/* Equiv_class_label hashtable helpers.  */

struct equiv_class_hasher : typed_free_remove <equiv_class_label>
{
  typedef equiv_class_label value_type;
  typedef equiv_class_label compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash function for a equiv_class_label_t.  The value is precomputed
   in the HASHCODE member, so just return it.  */

inline hashval_t
equiv_class_hasher::hash (const value_type *ecl)
{
  return ecl->hashcode;
}

/* Equality function for two equiv_class_label_t's.  Entries compare
   equal when their label bitmaps are equal; the hashcode check is a
   cheap early-out.  */

inline bool
equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}
1894
1895 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1896 classes. */
1897 static hash_table <equiv_class_hasher> pointer_equiv_class_table;
1898
1899 /* A hashtable for mapping a bitmap of labels->location equivalence
1900 classes. */
1901 static hash_table <equiv_class_hasher> location_equiv_class_table;
1902
/* Lookup the equivalence class entry in TABLE for the bitmap of
   LABELS.  If none exists yet, insert a fresh entry whose
   equivalence class is 0 (callers detect that and assign a real
   class).  Returns the (possibly new) entry.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
  if (!*slot)
    {
      /* First time this label set is seen: record it with class 0 so
	 the caller can tell the entry is new.  */
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1926
1927 /* Perform offline variable substitution.
1928
1929 This is a worst case quadratic time way of identifying variables
1930 that must have equivalent points-to sets, including those caused by
1931 static cycles, and single entry subgraphs, in the constraint graph.
1932
1933 The technique is described in "Exploiting Pointer and Location
1934 Equivalence to Optimize Pointer Analysis. In the 14th International
1935 Static Analysis Symposium (SAS), August 2007." It is known as the
1936 "HU" algorithm, and is equivalent to value numbering the collapsed
1937 constraint graph including evaluating unions.
1938
1939 The general method of finding equivalence classes is as follows:
1940 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1941 Initialize all non-REF nodes to be direct nodes.
1942 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1943 variable}
1944 For each constraint containing the dereference, we also do the same
1945 thing.
1946
1947 We then compute SCC's in the graph and unify nodes in the same SCC,
1948 including pts sets.
1949
1950 For each non-collapsed node x:
1951 Visit all unvisited explicit incoming edges.
     Ignoring all non-pointers, set pts(x) = Union of pts(y) for each y
     where y->x.
1954 Lookup the equivalence class for pts(x).
1955 If we found one, equivalence_class(x) = found class.
1956 Otherwise, equivalence_class(x) = new class, and new_class is
1957 added to the lookup table.
1958
1959 All direct nodes with the same equivalence class can be replaced
1960 with a single representative node.
1961 All unlabeled nodes (label == 0) are not pointers and all edges
1962 involving them can be eliminated.
1963 We perform these optimizations during rewrite_constraints
1964
1965 In addition to pointer equivalence class finding, we also perform
1966 location equivalence class finding. This is the set of variables
1967 that always appear together in points-to sets. We use this to
1968 compress the size of the points-to sets. */
1969
1970 /* Current maximum pointer equivalence class id. */
1971 static int pointer_equiv_class;
1972
1973 /* Current maximum location equivalence class id. */
1974 static int location_equiv_class;
1975
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Same Tarjan/Nuutila scheme
   as scc_visit, but run over the predecessor graph and collapsing
   via SI's node_mapping instead of the union-find structure.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  In the predecessor graph the PREDS
     edges are the edges being traversed.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Lowlink update.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Lowlink update.  */
      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root
     iff its DFS number was not lowered by any neighbor.  */
  if (si->dfs[n] == my_dfs)
    {
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* An indirect member makes the whole SCC indirect.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2064
/* Label pointer equivalences: compute the points-to label set of node
   N from its (already labeled) predecessors and assign N a pointer
   equivalence class, sharing the class with nodes that have the same
   label set.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating N's set until a second contributing
		 predecessor shows up; with only one we can share.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable is represented by FIRST_REF_NODE + n.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	/* New label set: assign a fresh class.  */
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* Seen before: share the canonical bitmap and free ours.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2151
2152 /* Print the pred graph in dot format. */
2153
2154 static void
2155 dump_pred_graph (struct scc_info *si, FILE *file)
2156 {
2157 unsigned int i;
2158
2159 /* Only print the graph if it has already been initialized: */
2160 if (!graph)
2161 return;
2162
2163 /* Prints the header of the dot file: */
2164 fprintf (file, "strict digraph {\n");
2165 fprintf (file, " node [\n shape = box\n ]\n");
2166 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2167 fprintf (file, "\n // List of nodes and complex constraints in "
2168 "the constraint graph:\n");
2169
2170 /* The next lines print the nodes in the graph together with the
2171 complex constraints attached to them. */
2172 for (i = 1; i < graph->size; i++)
2173 {
2174 if (i == FIRST_REF_NODE)
2175 continue;
2176 if (si->node_mapping[i] != i)
2177 continue;
2178 if (i < FIRST_REF_NODE)
2179 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2180 else
2181 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2182 if (graph->points_to[i]
2183 && !bitmap_empty_p (graph->points_to[i]))
2184 {
2185 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2186 unsigned j;
2187 bitmap_iterator bi;
2188 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2189 fprintf (file, " %d", j);
2190 fprintf (file, " }\"]");
2191 }
2192 fprintf (file, ";\n");
2193 }
2194
2195 /* Go over the edges. */
2196 fprintf (file, "\n // Edges in the constraint graph:\n");
2197 for (i = 1; i < graph->size; i++)
2198 {
2199 unsigned j;
2200 bitmap_iterator bi;
2201 if (si->node_mapping[i] != i)
2202 continue;
2203 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2204 {
2205 unsigned from = si->node_mapping[j];
2206 if (from < FIRST_REF_NODE)
2207 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2208 else
2209 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2210 fprintf (file, " -> ");
2211 if (i < FIRST_REF_NODE)
2212 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2213 else
2214 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2215 fprintf (file, ";\n");
2216 }
2217 }
2218
2219 /* Prints the tail of the dot file. */
2220 fprintf (file, "}\n");
2221 }
2222
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Runs four phases over the predecessor graph: (1) condense SCCs,
   (2) assign pointer-equivalence labels, (3) assign location-equivalence
   labels, (4) strip edges off provably-non-pointer nodes.  Returns the
   SCC info, which the caller must later release via
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Tables mapping points-to/pointed-by sets to class labels.  Label
     zero is reserved to mean "non-pointer", so the counters start
     at one.  */
  pointer_equiv_class_table.create (511);
  location_equiv_class_table.create (511);
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* The visited bitmap is reused for the labeling walk below.  */
  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The table already owns an identical set; ours is redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump the SCC mapping and the computed equivalence classes.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label zero means the node can never point to anything;
	 its edges cannot influence any solution.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2354
/* Free information that was only necessary for variable
   substitution (the SCC info SI plus the per-node equivalence arrays
   and lookup tables hung off the global GRAPH).  */

static void
free_var_substitution_info (struct scc_info *si)
{
  /* SCC/DFS bookkeeping.  */
  free_scc_info (si);
  /* Per-node label and equivalence side arrays of the graph.  */
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  /* Set-to-label hash tables.  */
  pointer_equiv_class_table.dispose ();
  location_equiv_class_table.dispose ();
  /* Bitmaps allocated during substitution live on this obstack.  */
  bitmap_obstack_release (&iteration_obstack);
}
2372
2373 /* Return an existing node that is equivalent to NODE, which has
2374 equivalence class LABEL, if one exists. Return NODE otherwise. */
2375
2376 static unsigned int
2377 find_equivalent_node (constraint_graph_t graph,
2378 unsigned int node, unsigned int label)
2379 {
2380 /* If the address version of this variable is unused, we can
2381 substitute it for anything else with the same label.
2382 Otherwise, we know the pointers are equivalent, but not the
2383 locations, and we can unite them later. */
2384
2385 if (!bitmap_bit_p (graph->address_taken, node))
2386 {
2387 gcc_checking_assert (label < graph->size);
2388
2389 if (graph->eq_rep[label] != -1)
2390 {
2391 /* Unify the two variables since we know they are equivalent. */
2392 if (unite (graph->eq_rep[label], node))
2393 unify_nodes (graph, graph->eq_rep[label], node, false);
2394 return graph->eq_rep[label];
2395 }
2396 else
2397 {
2398 graph->eq_rep[label] = node;
2399 graph->pe_rep[label] = node;
2400 }
2401 }
2402 else
2403 {
2404 gcc_checking_assert (label < graph->size);
2405 graph->pe[node] = label;
2406 if (graph->pe_rep[label] == -1)
2407 graph->pe_rep[label] = node;
2408 }
2409
2410 return node;
2411 }
2412
2413 /* Unite pointer equivalent but not location equivalent nodes in
2414 GRAPH. This may only be performed once variable substitution is
2415 finished. */
2416
2417 static void
2418 unite_pointer_equivalences (constraint_graph_t graph)
2419 {
2420 unsigned int i;
2421
2422 /* Go through the pointer equivalences and unite them to their
2423 representative, if they aren't already. */
2424 for (i = 1; i < FIRST_REF_NODE; i++)
2425 {
2426 unsigned int label = graph->pe[i];
2427 if (label)
2428 {
2429 int label_rep = graph->pe_rep[label];
2430
2431 if (label_rep == -1)
2432 continue;
2433
2434 label_rep = find (label_rep);
2435 if (label_rep >= 0 && unite (label_rep, find (i)))
2436 unify_nodes (graph, label_rep, i, false);
2437 }
2438 }
2439 }
2440
2441 /* Move complex constraints to the GRAPH nodes they belong to. */
2442
2443 static void
2444 move_complex_constraints (constraint_graph_t graph)
2445 {
2446 int i;
2447 constraint_t c;
2448
2449 FOR_EACH_VEC_ELT (constraints, i, c)
2450 {
2451 if (c)
2452 {
2453 struct constraint_expr lhs = c->lhs;
2454 struct constraint_expr rhs = c->rhs;
2455
2456 if (lhs.type == DEREF)
2457 {
2458 insert_into_complex (graph, lhs.var, c);
2459 }
2460 else if (rhs.type == DEREF)
2461 {
2462 if (!(get_varinfo (lhs.var)->is_special_var))
2463 insert_into_complex (graph, rhs.var, c);
2464 }
2465 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2466 && (lhs.offset != 0 || rhs.offset != 0))
2467 {
2468 insert_into_complex (graph, rhs.var, c);
2469 }
2470 }
2471 }
2472 }
2473
2474
2475 /* Optimize and rewrite complex constraints while performing
2476 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2477 result of perform_variable_substitution. */
2478
2479 static void
2480 rewrite_constraints (constraint_graph_t graph,
2481 struct scc_info *si)
2482 {
2483 int i;
2484 constraint_t c;
2485
2486 #ifdef ENABLE_CHECKING
2487 for (unsigned int j = 0; j < graph->size; j++)
2488 gcc_assert (find (j) == j);
2489 #endif
2490
2491 FOR_EACH_VEC_ELT (constraints, i, c)
2492 {
2493 struct constraint_expr lhs = c->lhs;
2494 struct constraint_expr rhs = c->rhs;
2495 unsigned int lhsvar = find (lhs.var);
2496 unsigned int rhsvar = find (rhs.var);
2497 unsigned int lhsnode, rhsnode;
2498 unsigned int lhslabel, rhslabel;
2499
2500 lhsnode = si->node_mapping[lhsvar];
2501 rhsnode = si->node_mapping[rhsvar];
2502 lhslabel = graph->pointer_label[lhsnode];
2503 rhslabel = graph->pointer_label[rhsnode];
2504
2505 /* See if it is really a non-pointer variable, and if so, ignore
2506 the constraint. */
2507 if (lhslabel == 0)
2508 {
2509 if (dump_file && (dump_flags & TDF_DETAILS))
2510 {
2511
2512 fprintf (dump_file, "%s is a non-pointer variable,"
2513 "ignoring constraint:",
2514 get_varinfo (lhs.var)->name);
2515 dump_constraint (dump_file, c);
2516 fprintf (dump_file, "\n");
2517 }
2518 constraints[i] = NULL;
2519 continue;
2520 }
2521
2522 if (rhslabel == 0)
2523 {
2524 if (dump_file && (dump_flags & TDF_DETAILS))
2525 {
2526
2527 fprintf (dump_file, "%s is a non-pointer variable,"
2528 "ignoring constraint:",
2529 get_varinfo (rhs.var)->name);
2530 dump_constraint (dump_file, c);
2531 fprintf (dump_file, "\n");
2532 }
2533 constraints[i] = NULL;
2534 continue;
2535 }
2536
2537 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2538 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2539 c->lhs.var = lhsvar;
2540 c->rhs.var = rhsvar;
2541 }
2542 }
2543
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  /* indirect_cycles[NODE] holds the node NODE was found to cycle with
     through a dereference; -1 means no such cycle is recorded.  */
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      vec<unsigned> queue = vNULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      /* Phase 1: while iterating the solution, only record which
	 representatives get united into TO.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      /* Phase 2: iteration is done, now the solutions may be merged.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      queue.release ();
      return true;
    }
  return false;
}
2583
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only representatives with a non-empty solution and either
	 outgoing copy edges or complex constraints can affect others.  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Recompute the topological order each iteration; unification
	 may have restructured the graph.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
		 is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ???  But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS := the bits added since the last visit.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* No new bits: nothing to propagate.  */
	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember what we have already propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      /* Complex constraints may have grown the solution.  */
	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
			 placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Successor gained bits: revisit it.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2740
2741 /* Map from trees to variable infos. */
2742 static struct pointer_map_t *vi_for_tree;
2743
2744
2745 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2746
2747 static void
2748 insert_vi_for_tree (tree t, varinfo_t vi)
2749 {
2750 void **slot = pointer_map_insert (vi_for_tree, t);
2751 gcc_assert (vi);
2752 gcc_assert (*slot == NULL);
2753 *slot = vi;
2754 }
2755
2756 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2757 exist in the map, return NULL, otherwise, return the varinfo we found. */
2758
2759 static varinfo_t
2760 lookup_vi_for_tree (tree t)
2761 {
2762 void **slot = pointer_map_contains (vi_for_tree, t);
2763 if (slot == NULL)
2764 return NULL;
2765
2766 return (varinfo_t) *slot;
2767 }
2768
2769 /* Return a printable name for DECL */
2770
2771 static const char *
2772 alias_get_name (tree decl)
2773 {
2774 const char *res = NULL;
2775 char *temp;
2776 int num_printed = 0;
2777
2778 if (!dump_file)
2779 return "NULL";
2780
2781 if (TREE_CODE (decl) == SSA_NAME)
2782 {
2783 res = get_name (decl);
2784 if (res)
2785 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2786 else
2787 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2788 if (num_printed > 0)
2789 {
2790 res = ggc_strdup (temp);
2791 free (temp);
2792 }
2793 }
2794 else if (DECL_P (decl))
2795 {
2796 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2797 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2798 else
2799 {
2800 res = get_name (decl);
2801 if (!res)
2802 {
2803 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2804 if (num_printed > 0)
2805 {
2806 res = ggc_strdup (temp);
2807 free (temp);
2808 }
2809 }
2810 }
2811 }
2812 if (res != NULL)
2813 return res;
2814
2815 return "NULL";
2816 }
2817
2818 /* Find the variable id for tree T in the map.
2819 If T doesn't exist in the map, create an entry for it and return it. */
2820
2821 static varinfo_t
2822 get_vi_for_tree (tree t)
2823 {
2824 void **slot = pointer_map_contains (vi_for_tree, t);
2825 if (slot == NULL)
2826 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2827
2828 return (varinfo_t) *slot;
2829 }
2830
2831 /* Get a scalar constraint expression for a new temporary variable. */
2832
2833 static struct constraint_expr
2834 new_scalar_tmp_constraint_exp (const char *name)
2835 {
2836 struct constraint_expr tmp;
2837 varinfo_t vi;
2838
2839 vi = new_var_info (NULL_TREE, name);
2840 vi->offset = 0;
2841 vi->size = -1;
2842 vi->fullsize = -1;
2843 vi->is_full_var = 1;
2844
2845 tmp.var = vi->id;
2846 tmp.type = SCALAR;
2847 tmp.offset = 0;
2848
2849 return tmp;
2850 }
2851
/* Get a constraint expression vector from an SSA_VAR_P node.
   If ADDRESS_P is true the result will have its address taken.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias && node->analyzed)
	{
	  node = varpool_variable_node (node, NULL);
	  t = node->decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* This case is believed dead; the gcc_unreachable documents that
	 belief while keeping the old handling below it.  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* vi_next walks the field chain of the variable.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2915
2916 /* Process constraint T, performing various simplifications and then
2917 adding it to our list of overall constraints. */
2918
2919 static void
2920 process_constraint (constraint_t t)
2921 {
2922 struct constraint_expr rhs = t->rhs;
2923 struct constraint_expr lhs = t->lhs;
2924
2925 gcc_assert (rhs.var < varmap.length ());
2926 gcc_assert (lhs.var < varmap.length ());
2927
2928 /* If we didn't get any useful constraint from the lhs we get
2929 &ANYTHING as fallback from get_constraint_for. Deal with
2930 it here by turning it into *ANYTHING. */
2931 if (lhs.type == ADDRESSOF
2932 && lhs.var == anything_id)
2933 lhs.type = DEREF;
2934
2935 /* ADDRESSOF on the lhs is invalid. */
2936 gcc_assert (lhs.type != ADDRESSOF);
2937
2938 /* We shouldn't add constraints from things that cannot have pointers.
2939 It's not completely trivial to avoid in the callers, so do it here. */
2940 if (rhs.type != ADDRESSOF
2941 && !get_varinfo (rhs.var)->may_have_pointers)
2942 return;
2943
2944 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2945 if (!get_varinfo (lhs.var)->may_have_pointers)
2946 return;
2947
2948 /* This can happen in our IR with things like n->a = *p */
2949 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2950 {
2951 /* Split into tmp = *rhs, *lhs = tmp */
2952 struct constraint_expr tmplhs;
2953 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2954 process_constraint (new_constraint (tmplhs, rhs));
2955 process_constraint (new_constraint (lhs, tmplhs));
2956 }
2957 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2958 {
2959 /* Split into tmp = &rhs, *lhs = tmp */
2960 struct constraint_expr tmplhs;
2961 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2962 process_constraint (new_constraint (tmplhs, rhs));
2963 process_constraint (new_constraint (lhs, tmplhs));
2964 }
2965 else
2966 {
2967 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2968 constraints.safe_push (t);
2969 }
2970 }
2971
2972
2973 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2974 structure. */
2975
2976 static HOST_WIDE_INT
2977 bitpos_of_field (const tree fdecl)
2978 {
2979 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
2980 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
2981 return -1;
2982
2983 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2984 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
2985 }
2986
2987
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET may be
   NULL_TREE or non-constant, in which case the offset is treated as
   unknown.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (offset, SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  /* Overflow check: the unit->bit conversion must round-trip.  */
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the whole field chain of the variable, adding every
	     sub-field as a possible target.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ???  If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != 0)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	/* SCALAR/DEREF: just record the offset on the constraint; it is
	   interpreted during solving.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3105
3106
3107 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3108 If address_p is true the result will be taken its address of.
3109 If lhs_p is true then the constraint expression is assumed to be used
3110 as the lhs. */
3111
3112 static void
3113 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3114 bool address_p, bool lhs_p)
3115 {
3116 tree orig_t = t;
3117 HOST_WIDE_INT bitsize = -1;
3118 HOST_WIDE_INT bitmaxsize = -1;
3119 HOST_WIDE_INT bitpos;
3120 tree forzero;
3121
3122 /* Some people like to do cute things like take the address of
3123 &0->a.b */
3124 forzero = t;
3125 while (handled_component_p (forzero)
3126 || INDIRECT_REF_P (forzero)
3127 || TREE_CODE (forzero) == MEM_REF)
3128 forzero = TREE_OPERAND (forzero, 0);
3129
3130 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3131 {
3132 struct constraint_expr temp;
3133
3134 temp.offset = 0;
3135 temp.var = integer_id;
3136 temp.type = SCALAR;
3137 results->safe_push (temp);
3138 return;
3139 }
3140
3141 /* Handle type-punning through unions. If we are extracting a pointer
3142 from a union via a possibly type-punning access that pointer
3143 points to anything, similar to a conversion of an integer to
3144 a pointer. */
3145 if (!lhs_p)
3146 {
3147 tree u;
3148 for (u = t;
3149 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3150 u = TREE_OPERAND (u, 0))
3151 if (TREE_CODE (u) == COMPONENT_REF
3152 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3153 {
3154 struct constraint_expr temp;
3155
3156 temp.offset = 0;
3157 temp.var = anything_id;
3158 temp.type = ADDRESSOF;
3159 results->safe_push (temp);
3160 return;
3161 }
3162 }
3163
3164 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3165
3166 /* Pretend to take the address of the base, we'll take care of
3167 adding the required subset of sub-fields below. */
3168 get_constraint_for_1 (t, results, true, lhs_p);
3169 gcc_assert (results->length () == 1);
3170 struct constraint_expr &result = results->last ();
3171
3172 if (result.type == SCALAR
3173 && get_varinfo (result.var)->is_full_var)
3174 /* For single-field vars do not bother about the offset. */
3175 result.offset = 0;
3176 else if (result.type == SCALAR)
3177 {
3178 /* In languages like C, you can access one past the end of an
3179 array. You aren't allowed to dereference it, so we can
3180 ignore this constraint. When we handle pointer subtraction,
3181 we may have to do something cute here. */
3182
3183 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3184 && bitmaxsize != 0)
3185 {
3186 /* It's also not true that the constraint will actually start at the
3187 right offset, it may start in some padding. We only care about
3188 setting the constraint to the first actual field it touches, so
3189 walk to find it. */
3190 struct constraint_expr cexpr = result;
3191 varinfo_t curr;
3192 results->pop ();
3193 cexpr.offset = 0;
3194 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3195 {
3196 if (ranges_overlap_p (curr->offset, curr->size,
3197 bitpos, bitmaxsize))
3198 {
3199 cexpr.var = curr->id;
3200 results->safe_push (cexpr);
3201 if (address_p)
3202 break;
3203 }
3204 }
3205 /* If we are going to take the address of this field then
3206 to be able to compute reachability correctly add at least
3207 the last field of the variable. */
3208 if (address_p && results->length () == 0)
3209 {
3210 curr = get_varinfo (cexpr.var);
3211 while (curr->next != 0)
3212 curr = vi_next (curr);
3213 cexpr.var = curr->id;
3214 results->safe_push (cexpr);
3215 }
3216 else if (results->length () == 0)
3217 /* Assert that we found *some* field there. The user couldn't be
3218 accessing *only* padding. */
3219 /* Still the user could access one past the end of an array
3220 embedded in a struct resulting in accessing *only* padding. */
3221 /* Or accessing only padding via type-punning to a type
	 that has a field just in padding space.  */
3223 {
3224 cexpr.type = SCALAR;
3225 cexpr.var = anything_id;
3226 cexpr.offset = 0;
3227 results->safe_push (cexpr);
3228 }
3229 }
3230 else if (bitmaxsize == 0)
3231 {
3232 if (dump_file && (dump_flags & TDF_DETAILS))
3233 fprintf (dump_file, "Access to zero-sized part of variable,"
3234 "ignoring\n");
3235 }
3236 else
3237 if (dump_file && (dump_flags & TDF_DETAILS))
3238 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3239 }
3240 else if (result.type == DEREF)
3241 {
3242 /* If we do not know exactly where the access goes say so. Note
3243 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
3245 if (bitpos == -1
3246 || bitsize != bitmaxsize
3247 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3248 || result.offset == UNKNOWN_OFFSET)
3249 result.offset = UNKNOWN_OFFSET;
3250 else
3251 result.offset += bitpos;
3252 }
3253 else if (result.type == ADDRESSOF)
3254 {
3255 /* We can end up here for component references on a
3256 VIEW_CONVERT_EXPR <>(&foobar). */
3257 result.type = SCALAR;
3258 result.var = anything_id;
3259 result.offset = 0;
3260 }
3261 else
3262 gcc_unreachable ();
3263 }
3264
3265
3266 /* Dereference the constraint expression CONS, and return the result.
3267 DEREF (ADDRESSOF) = SCALAR
3268 DEREF (SCALAR) = DEREF
3269 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3270 This is needed so that we can handle dereferencing DEREF constraints. */
3271
3272 static void
3273 do_deref (vec<ce_s> *constraints)
3274 {
3275 struct constraint_expr *c;
3276 unsigned int i = 0;
3277
3278 FOR_EACH_VEC_ELT (*constraints, i, c)
3279 {
3280 if (c->type == SCALAR)
3281 c->type = DEREF;
3282 else if (c->type == ADDRESSOF)
3283 c->type = SCALAR;
3284 else if (c->type == DEREF)
3285 {
3286 struct constraint_expr tmplhs;
3287 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3288 process_constraint (new_constraint (tmplhs, *c));
3289 c->var = tmplhs.var;
3290 }
3291 else
3292 gcc_unreachable ();
3293 }
3294 }
3295
3296 /* Given a tree T, return the constraint expression for taking the
3297 address of it. */
3298
3299 static void
3300 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3301 {
3302 struct constraint_expr *c;
3303 unsigned int i;
3304
3305 get_constraint_for_1 (t, results, true, true);
3306
3307 FOR_EACH_VEC_ELT (*results, i, c)
3308 {
3309 if (c->type == DEREF)
3310 c->type = SCALAR;
3311 else
3312 c->type = ADDRESSOF;
3313 }
3314 }
3315
/* Given a tree T, append the constraint expressions for it to *RESULTS.
   ADDRESS_P is true when we are taking the address of T (see
   get_constraint_for_address_of); it suppresses the expansion of
   sub-variables for MEM_REFs below.  LHS_P is false when T is used as
   the rhs of a constraint (see get_constraint_for_rhs); it is threaded
   through to get_constraint_for_component_ref which uses it for
   type-punning union reads.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      /* Constraint for the pointer, adjusted by the constant
		 offset operand, then dereferenced.  */
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a SCALAR result on a field-decomposed variable add
		 all sub-fields the access may touch, up to the size of
		 the accessed type (or all of them if that size is not
		 a known constant).  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change the points-to solution.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect the constraints of all initializer elements.  */
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3500
3501 /* Given a gimple tree T, return the constraint expression vector for it. */
3502
3503 static void
3504 get_constraint_for (tree t, vec<ce_s> *results)
3505 {
3506 gcc_assert (results->length () == 0);
3507
3508 get_constraint_for_1 (t, results, false, true);
3509 }
3510
3511 /* Given a gimple tree T, return the constraint expression vector for it
3512 to be used as the rhs of a constraint. */
3513
3514 static void
3515 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3516 {
3517 gcc_assert (results->length () == 0);
3518
3519 get_constraint_for_1 (t, results, false, false);
3520 }
3521
3522
3523 /* Efficiently generates constraints from all entries in *RHSC to all
3524 entries in *LHSC. */
3525
3526 static void
3527 process_all_all_constraints (vec<ce_s> lhsc,
3528 vec<ce_s> rhsc)
3529 {
3530 struct constraint_expr *lhsp, *rhsp;
3531 unsigned i, j;
3532
3533 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3534 {
3535 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3536 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3537 process_constraint (new_constraint (*lhsp, *rhsp));
3538 }
3539 else
3540 {
3541 struct constraint_expr tmp;
3542 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3543 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3544 process_constraint (new_constraint (tmp, *rhsp));
3545 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3546 process_constraint (new_constraint (*lhsp, tmp));
3547 }
3548 }
3549
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the lhs and rhs trees
   of the copy statement.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is an indirect access (or the lhs is ANYTHING) we
     cannot match up fields and fall back to an all-to-all copy with
     unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      /* Both sides decompose into fields; walk the two field lists in
         lockstep, emitting a copy constraint for each pair of
         overlapping fields.  J indexes the lhs fields, K the rhs
         fields.  */
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  /* Positions are compared relative to the access bases:
	     lhsv->offset - lhsoffset vs. rhsv->offset - rhsoffset,
	     rearranged as additions to avoid negative values.  */
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance whichever cursor lags behind; stop when the rhs
	     fields are exhausted.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3621
3622 /* Create constraints ID = { rhsc }. */
3623
3624 static void
3625 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3626 {
3627 struct constraint_expr *c;
3628 struct constraint_expr includes;
3629 unsigned int j;
3630
3631 includes.var = id;
3632 includes.offset = 0;
3633 includes.type = SCALAR;
3634
3635 FOR_EACH_VEC_ELT (rhsc, j, c)
3636 process_constraint (new_constraint (includes, *c));
3637 }
3638
/* Create a constraint ID = OP, i.e. make everything the tree OP may
   point to part of the solution of the variable with id ID.  */

static void
make_constraint_to (unsigned id, tree op)
{
  vec<ce_s> rhsc = vNULL;
  get_constraint_for_rhs (op, &rhsc);
  make_constraints_to (id, rhsc);
  rhsc.release ();
}
3649
3650 /* Create a constraint ID = &FROM. */
3651
3652 static void
3653 make_constraint_from (varinfo_t vi, int from)
3654 {
3655 struct constraint_expr lhs, rhs;
3656
3657 lhs.var = vi->id;
3658 lhs.offset = 0;
3659 lhs.type = SCALAR;
3660
3661 rhs.var = from;
3662 rhs.offset = 0;
3663 rhs.type = ADDRESSOF;
3664 process_constraint (new_constraint (lhs, rhs));
3665 }
3666
3667 /* Create a constraint ID = FROM. */
3668
3669 static void
3670 make_copy_constraint (varinfo_t vi, int from)
3671 {
3672 struct constraint_expr lhs, rhs;
3673
3674 lhs.var = vi->id;
3675 lhs.offset = 0;
3676 lhs.type = SCALAR;
3677
3678 rhs.var = from;
3679 rhs.offset = 0;
3680 rhs.type = SCALAR;
3681 process_constraint (new_constraint (lhs, rhs));
3682 }
3683
/* Make constraints necessary to make OP escape, by adding everything
   OP may point to to the ESCAPED solution.  */

static void
make_escape_constraint (tree op)
{
  make_constraint_to (escaped_id, op);
}
3691
3692 /* Add constraints to that the solution of VI is transitively closed. */
3693
3694 static void
3695 make_transitive_closure_constraints (varinfo_t vi)
3696 {
3697 struct constraint_expr lhs, rhs;
3698
3699 /* VAR = *VAR; */
3700 lhs.type = SCALAR;
3701 lhs.var = vi->id;
3702 lhs.offset = 0;
3703 rhs.type = DEREF;
3704 rhs.var = vi->id;
3705 rhs.offset = 0;
3706 process_constraint (new_constraint (lhs, rhs));
3707
3708 /* VAR = VAR + UNKNOWN; */
3709 lhs.type = SCALAR;
3710 lhs.var = vi->id;
3711 lhs.offset = 0;
3712 rhs.type = SCALAR;
3713 rhs.var = vi->id;
3714 rhs.offset = UNKNOWN_OFFSET;
3715 process_constraint (new_constraint (lhs, rhs));
3716 }
3717
/* Obstack providing temporary storage for the fake VAR_DECLs created
   by build_fake_var_decl below.  */
struct obstack fake_var_decl_obstack;
3720
/* Build a fake VAR_DECL of type TYPE acting as referrer to a DECL_UID.
   The node is allocated on fake_var_decl_obstack, not with ggc.  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  /* Zero the raw storage first; TREE_SET_CODE and the accessors below
     rely on a cleared node.  */
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* No points-to UID assigned yet.  */
  SET_DECL_PT_UID (decl, -1);
  /* Compute size and alignment last, once code and type are set.  */
  layout_decl (decl, 0);
  return decl;
}
3735
3736 /* Create a new artificial heap variable with NAME.
3737 Return the created variable. */
3738
3739 static varinfo_t
3740 make_heapvar (const char *name)
3741 {
3742 varinfo_t vi;
3743 tree heapvar;
3744
3745 heapvar = build_fake_var_decl (ptr_type_node);
3746 DECL_EXTERNAL (heapvar) = 1;
3747
3748 vi = new_var_info (heapvar, name);
3749 vi->is_artificial_var = true;
3750 vi->is_heap_var = true;
3751 vi->is_unknown_size_var = true;
3752 vi->offset = 0;
3753 vi->fullsize = ~0;
3754 vi->size = ~0;
3755 vi->is_full_var = true;
3756 insert_vi_for_tree (heapvar, vi);
3757
3758 return vi;
3759 }
3760
3761 /* Create a new artificial heap variable with NAME and make a
3762 constraint from it to LHS. Set flags according to a tag used
3763 for tracking restrict pointers. */
3764
3765 static varinfo_t
3766 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3767 {
3768 varinfo_t vi = make_heapvar (name);
3769 vi->is_global_var = 1;
3770 vi->may_have_pointers = 1;
3771 make_constraint_from (lhs, vi->id);
3772 return vi;
3773 }
3774
/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Set flags according to a tag used
   for tracking restrict pointers and make the artificial heap
   point to global memory (via a copy from NONLOCAL).  */

static varinfo_t
make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
{
  varinfo_t vi = make_constraint_from_restrict (lhs, name);
  make_copy_constraint (vi, nonlocal_id);
  return vi;
}
3787
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  fi_parm_base is the offset
   of the first parameter; further parameters presumably follow at
   consecutive offsets -- see get_function_part_constraint.  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3796
3797 /* Get a constraint for the requested part of a function designator FI
3798 when operating in IPA mode. */
3799
3800 static struct constraint_expr
3801 get_function_part_constraint (varinfo_t fi, unsigned part)
3802 {
3803 struct constraint_expr c;
3804
3805 gcc_assert (in_ipa_mode);
3806
3807 if (fi->id == anything_id)
3808 {
3809 /* ??? We probably should have a ANYFN special variable. */
3810 c.var = anything_id;
3811 c.offset = 0;
3812 c.type = SCALAR;
3813 }
3814 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3815 {
3816 varinfo_t ai = first_vi_for_offset (fi, part);
3817 if (ai)
3818 c.var = ai->id;
3819 else
3820 c.var = anything_id;
3821 c.offset = 0;
3822 c.type = SCALAR;
3823 }
3824 else
3825 {
3826 c.var = fi->id;
3827 c.offset = part;
3828 c.type = DEREF;
3829 }
3830
3831 return c;
3832 }
3833
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS of statement STMT.  Pushes onto *RESULTS the constraint
   expressions describing what the call may return.  */

static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Whether any argument was added to the call-use solution, in which
     case the call may also return pointers into it.  */
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  /* Not clobbered and not escaping: only the call-use solution
	     is affected.  Without EAF_DIRECT, memory transitively
	     reachable from the argument counts as used too.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Clobbered but not escaping: affects both the call-use and
	     call-clobber solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* No useful flags: the argument fully escapes.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3935
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables (via RHSC, which
   describes what the call may return).  STMT is the call statement
   and FNDECL its decl if known.
   NOTE(review): the incoming FLAGS value is never read -- it is
   overwritten with gimple_call_return_flags below; the parameter
   looks vestigial.  */

static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return gets a fresh heap variable instead.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
	 it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
4002
4003 /* For non-IPA mode, generate constraints necessary for a call of a
4004 const function that returns a pointer in the statement STMT. */
4005
4006 static void
4007 handle_const_call (gimple stmt, vec<ce_s> *results)
4008 {
4009 struct constraint_expr rhsc;
4010 unsigned int k;
4011
4012 /* Treat nested const functions the same as pure functions as far
4013 as the static chain is concerned. */
4014 if (gimple_call_chain (stmt))
4015 {
4016 varinfo_t uses = get_call_use_vi (stmt);
4017 make_transitive_closure_constraints (uses);
4018 make_constraint_to (uses->id, gimple_call_chain (stmt));
4019 rhsc.var = uses->id;
4020 rhsc.offset = 0;
4021 rhsc.type = SCALAR;
4022 results->safe_push (rhsc);
4023 }
4024
4025 /* May return arguments. */
4026 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4027 {
4028 tree arg = gimple_call_arg (stmt, k);
4029 vec<ce_s> argc = vNULL;
4030 unsigned i;
4031 struct constraint_expr *argp;
4032 get_constraint_for_rhs (arg, &argc);
4033 FOR_EACH_VEC_ELT (argc, i, argp)
4034 results->safe_push (*argp);
4035 argc.release ();
4036 }
4037
4038 /* May return addresses of globals. */
4039 rhsc.var = nonlocal_id;
4040 rhsc.offset = 0;
4041 rhsc.type = ADDRESSOF;
4042 results->safe_push (rhsc);
4043 }
4044
4045 /* For non-IPA mode, generate constraints necessary for a call to a
4046 pure function in statement STMT. */
4047
4048 static void
4049 handle_pure_call (gimple stmt, vec<ce_s> *results)
4050 {
4051 struct constraint_expr rhsc;
4052 unsigned i;
4053 varinfo_t uses = NULL;
4054
4055 /* Memory reached from pointer arguments is call-used. */
4056 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4057 {
4058 tree arg = gimple_call_arg (stmt, i);
4059 if (!uses)
4060 {
4061 uses = get_call_use_vi (stmt);
4062 make_transitive_closure_constraints (uses);
4063 }
4064 make_constraint_to (uses->id, arg);
4065 }
4066
4067 /* The static chain is used as well. */
4068 if (gimple_call_chain (stmt))
4069 {
4070 if (!uses)
4071 {
4072 uses = get_call_use_vi (stmt);
4073 make_transitive_closure_constraints (uses);
4074 }
4075 make_constraint_to (uses->id, gimple_call_chain (stmt));
4076 }
4077
4078 /* Pure functions may return call-used and nonlocal memory. */
4079 if (uses)
4080 {
4081 rhsc.var = uses->id;
4082 rhsc.offset = 0;
4083 rhsc.type = SCALAR;
4084 results->safe_push (rhsc);
4085 }
4086 rhsc.var = nonlocal_id;
4087 rhsc.offset = 0;
4088 rhsc.type = SCALAR;
4089 results->safe_push (rhsc);
4090 }
4091
4092
4093 /* Return the varinfo for the callee of CALL. */
4094
4095 static varinfo_t
4096 get_fi_for_callee (gimple call)
4097 {
4098 tree decl, fn = gimple_call_fn (call);
4099
4100 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4101 fn = OBJ_TYPE_REF_EXPR (fn);
4102
4103 /* If we can directly resolve the function being called, do so.
4104 Otherwise, it must be some sort of indirect expression that
4105 we should still be able to handle. */
4106 decl = gimple_call_addr_fndecl (fn);
4107 if (decl)
4108 return get_vi_for_tree (decl);
4109
4110 /* If the function is anything other than a SSA name pointer we have no
4111 clue and should be getting ANYFN (well, ANYTHING for now). */
4112 if (!fn || TREE_CODE (fn) != SSA_NAME)
4113 return get_varinfo (anything_id);
4114
4115 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4116 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4117 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4118 fn = SSA_NAME_VAR (fn);
4119
4120 return get_vi_for_tree (fn);
4121 }
4122
4123 /* Create constraints for the builtin call T. Return true if the call
4124 was handled, otherwise false. */
4125
4126 static bool
4127 find_func_aliases_for_builtin_call (gimple t)
4128 {
4129 tree fndecl = gimple_call_fndecl (t);
4130 vec<ce_s> lhsc = vNULL;
4131 vec<ce_s> rhsc = vNULL;
4132 varinfo_t fi;
4133
4134 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4135 /* ??? All builtins that are handled here need to be handled
4136 in the alias-oracle query functions explicitly! */
4137 switch (DECL_FUNCTION_CODE (fndecl))
4138 {
4139 /* All the following functions return a pointer to the same object
4140 as their first argument points to. The functions do not add
4141 to the ESCAPED solution. The functions make the first argument
4142 pointed to memory point to what the second argument pointed to
4143 memory points to. */
4144 case BUILT_IN_STRCPY:
4145 case BUILT_IN_STRNCPY:
4146 case BUILT_IN_BCOPY:
4147 case BUILT_IN_MEMCPY:
4148 case BUILT_IN_MEMMOVE:
4149 case BUILT_IN_MEMPCPY:
4150 case BUILT_IN_STPCPY:
4151 case BUILT_IN_STPNCPY:
4152 case BUILT_IN_STRCAT:
4153 case BUILT_IN_STRNCAT:
4154 case BUILT_IN_STRCPY_CHK:
4155 case BUILT_IN_STRNCPY_CHK:
4156 case BUILT_IN_MEMCPY_CHK:
4157 case BUILT_IN_MEMMOVE_CHK:
4158 case BUILT_IN_MEMPCPY_CHK:
4159 case BUILT_IN_STPCPY_CHK:
4160 case BUILT_IN_STPNCPY_CHK:
4161 case BUILT_IN_STRCAT_CHK:
4162 case BUILT_IN_STRNCAT_CHK:
4163 case BUILT_IN_TM_MEMCPY:
4164 case BUILT_IN_TM_MEMMOVE:
4165 {
4166 tree res = gimple_call_lhs (t);
4167 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4168 == BUILT_IN_BCOPY ? 1 : 0));
4169 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4170 == BUILT_IN_BCOPY ? 0 : 1));
4171 if (res != NULL_TREE)
4172 {
4173 get_constraint_for (res, &lhsc);
4174 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4175 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4176 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4177 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4178 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4179 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4180 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4181 else
4182 get_constraint_for (dest, &rhsc);
4183 process_all_all_constraints (lhsc, rhsc);
4184 lhsc.release ();
4185 rhsc.release ();
4186 }
4187 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4188 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4189 do_deref (&lhsc);
4190 do_deref (&rhsc);
4191 process_all_all_constraints (lhsc, rhsc);
4192 lhsc.release ();
4193 rhsc.release ();
4194 return true;
4195 }
4196 case BUILT_IN_MEMSET:
4197 case BUILT_IN_MEMSET_CHK:
4198 case BUILT_IN_TM_MEMSET:
4199 {
4200 tree res = gimple_call_lhs (t);
4201 tree dest = gimple_call_arg (t, 0);
4202 unsigned i;
4203 ce_s *lhsp;
4204 struct constraint_expr ac;
4205 if (res != NULL_TREE)
4206 {
4207 get_constraint_for (res, &lhsc);
4208 get_constraint_for (dest, &rhsc);
4209 process_all_all_constraints (lhsc, rhsc);
4210 lhsc.release ();
4211 rhsc.release ();
4212 }
4213 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4214 do_deref (&lhsc);
4215 if (flag_delete_null_pointer_checks
4216 && integer_zerop (gimple_call_arg (t, 1)))
4217 {
4218 ac.type = ADDRESSOF;
4219 ac.var = nothing_id;
4220 }
4221 else
4222 {
4223 ac.type = SCALAR;
4224 ac.var = integer_id;
4225 }
4226 ac.offset = 0;
4227 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4228 process_constraint (new_constraint (*lhsp, ac));
4229 lhsc.release ();
4230 return true;
4231 }
4232 case BUILT_IN_ASSUME_ALIGNED:
4233 {
4234 tree res = gimple_call_lhs (t);
4235 tree dest = gimple_call_arg (t, 0);
4236 if (res != NULL_TREE)
4237 {
4238 get_constraint_for (res, &lhsc);
4239 get_constraint_for (dest, &rhsc);
4240 process_all_all_constraints (lhsc, rhsc);
4241 lhsc.release ();
4242 rhsc.release ();
4243 }
4244 return true;
4245 }
4246 /* All the following functions do not return pointers, do not
4247 modify the points-to sets of memory reachable from their
4248 arguments and do not add to the ESCAPED solution. */
4249 case BUILT_IN_SINCOS:
4250 case BUILT_IN_SINCOSF:
4251 case BUILT_IN_SINCOSL:
4252 case BUILT_IN_FREXP:
4253 case BUILT_IN_FREXPF:
4254 case BUILT_IN_FREXPL:
4255 case BUILT_IN_GAMMA_R:
4256 case BUILT_IN_GAMMAF_R:
4257 case BUILT_IN_GAMMAL_R:
4258 case BUILT_IN_LGAMMA_R:
4259 case BUILT_IN_LGAMMAF_R:
4260 case BUILT_IN_LGAMMAL_R:
4261 case BUILT_IN_MODF:
4262 case BUILT_IN_MODFF:
4263 case BUILT_IN_MODFL:
4264 case BUILT_IN_REMQUO:
4265 case BUILT_IN_REMQUOF:
4266 case BUILT_IN_REMQUOL:
4267 case BUILT_IN_FREE:
4268 return true;
4269 case BUILT_IN_STRDUP:
4270 case BUILT_IN_STRNDUP:
4271 if (gimple_call_lhs (t))
4272 {
4273 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4274 vNULL, fndecl);
4275 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4276 NULL_TREE, &lhsc);
4277 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4278 NULL_TREE, &rhsc);
4279 do_deref (&lhsc);
4280 do_deref (&rhsc);
4281 process_all_all_constraints (lhsc, rhsc);
4282 lhsc.release ();
4283 rhsc.release ();
4284 return true;
4285 }
4286 break;
4287 /* String / character search functions return a pointer into the
4288 source string or NULL. */
4289 case BUILT_IN_INDEX:
4290 case BUILT_IN_STRCHR:
4291 case BUILT_IN_STRRCHR:
4292 case BUILT_IN_MEMCHR:
4293 case BUILT_IN_STRSTR:
4294 case BUILT_IN_STRPBRK:
4295 if (gimple_call_lhs (t))
4296 {
4297 tree src = gimple_call_arg (t, 0);
4298 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4299 constraint_expr nul;
4300 nul.var = nothing_id;
4301 nul.offset = 0;
4302 nul.type = ADDRESSOF;
4303 rhsc.safe_push (nul);
4304 get_constraint_for (gimple_call_lhs (t), &lhsc);
4305 process_all_all_constraints (lhsc, rhsc);
4306 lhsc.release ();
4307 rhsc.release ();
4308 }
4309 return true;
4310 /* Trampolines are special - they set up passing the static
4311 frame. */
4312 case BUILT_IN_INIT_TRAMPOLINE:
4313 {
4314 tree tramp = gimple_call_arg (t, 0);
4315 tree nfunc = gimple_call_arg (t, 1);
4316 tree frame = gimple_call_arg (t, 2);
4317 unsigned i;
4318 struct constraint_expr lhs, *rhsp;
4319 if (in_ipa_mode)
4320 {
4321 varinfo_t nfi = NULL;
4322 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4323 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4324 if (nfi)
4325 {
4326 lhs = get_function_part_constraint (nfi, fi_static_chain);
4327 get_constraint_for (frame, &rhsc);
4328 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4329 process_constraint (new_constraint (lhs, *rhsp));
4330 rhsc.release ();
4331
4332 /* Make the frame point to the function for
4333 the trampoline adjustment call. */
4334 get_constraint_for (tramp, &lhsc);
4335 do_deref (&lhsc);
4336 get_constraint_for (nfunc, &rhsc);
4337 process_all_all_constraints (lhsc, rhsc);
4338 rhsc.release ();
4339 lhsc.release ();
4340
4341 return true;
4342 }
4343 }
4344 /* Else fallthru to generic handling which will let
4345 the frame escape. */
4346 break;
4347 }
4348 case BUILT_IN_ADJUST_TRAMPOLINE:
4349 {
4350 tree tramp = gimple_call_arg (t, 0);
4351 tree res = gimple_call_lhs (t);
4352 if (in_ipa_mode && res)
4353 {
4354 get_constraint_for (res, &lhsc);
4355 get_constraint_for (tramp, &rhsc);
4356 do_deref (&rhsc);
4357 process_all_all_constraints (lhsc, rhsc);
4358 rhsc.release ();
4359 lhsc.release ();
4360 }
4361 return true;
4362 }
4363 CASE_BUILT_IN_TM_STORE (1):
4364 CASE_BUILT_IN_TM_STORE (2):
4365 CASE_BUILT_IN_TM_STORE (4):
4366 CASE_BUILT_IN_TM_STORE (8):
4367 CASE_BUILT_IN_TM_STORE (FLOAT):
4368 CASE_BUILT_IN_TM_STORE (DOUBLE):
4369 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4370 CASE_BUILT_IN_TM_STORE (M64):
4371 CASE_BUILT_IN_TM_STORE (M128):
4372 CASE_BUILT_IN_TM_STORE (M256):
4373 {
4374 tree addr = gimple_call_arg (t, 0);
4375 tree src = gimple_call_arg (t, 1);
4376
4377 get_constraint_for (addr, &lhsc);
4378 do_deref (&lhsc);
4379 get_constraint_for (src, &rhsc);
4380 process_all_all_constraints (lhsc, rhsc);
4381 lhsc.release ();
4382 rhsc.release ();
4383 return true;
4384 }
4385 CASE_BUILT_IN_TM_LOAD (1):
4386 CASE_BUILT_IN_TM_LOAD (2):
4387 CASE_BUILT_IN_TM_LOAD (4):
4388 CASE_BUILT_IN_TM_LOAD (8):
4389 CASE_BUILT_IN_TM_LOAD (FLOAT):
4390 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4391 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4392 CASE_BUILT_IN_TM_LOAD (M64):
4393 CASE_BUILT_IN_TM_LOAD (M128):
4394 CASE_BUILT_IN_TM_LOAD (M256):
4395 {
4396 tree dest = gimple_call_lhs (t);
4397 tree addr = gimple_call_arg (t, 0);
4398
4399 get_constraint_for (dest, &lhsc);
4400 get_constraint_for (addr, &rhsc);
4401 do_deref (&rhsc);
4402 process_all_all_constraints (lhsc, rhsc);
4403 lhsc.release ();
4404 rhsc.release ();
4405 return true;
4406 }
4407 /* Variadic argument handling needs to be handled in IPA
4408 mode as well. */
4409 case BUILT_IN_VA_START:
4410 {
4411 tree valist = gimple_call_arg (t, 0);
4412 struct constraint_expr rhs, *lhsp;
4413 unsigned i;
4414 get_constraint_for (valist, &lhsc);
4415 do_deref (&lhsc);
4416 /* The va_list gets access to pointers in variadic
4417 arguments. Which we know in the case of IPA analysis
4418 and otherwise are just all nonlocal variables. */
4419 if (in_ipa_mode)
4420 {
4421 fi = lookup_vi_for_tree (cfun->decl);
4422 rhs = get_function_part_constraint (fi, ~0);
4423 rhs.type = ADDRESSOF;
4424 }
4425 else
4426 {
4427 rhs.var = nonlocal_id;
4428 rhs.type = ADDRESSOF;
4429 rhs.offset = 0;
4430 }
4431 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4432 process_constraint (new_constraint (*lhsp, rhs));
4433 lhsc.release ();
4434 /* va_list is clobbered. */
4435 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4436 return true;
4437 }
4438 /* va_end doesn't have any effect that matters. */
4439 case BUILT_IN_VA_END:
4440 return true;
4441 /* Alternate return. Simply give up for now. */
4442 case BUILT_IN_RETURN:
4443 {
4444 fi = NULL;
4445 if (!in_ipa_mode
4446 || !(fi = get_vi_for_tree (cfun->decl)))
4447 make_constraint_from (get_varinfo (escaped_id), anything_id);
4448 else if (in_ipa_mode
4449 && fi != NULL)
4450 {
4451 struct constraint_expr lhs, rhs;
4452 lhs = get_function_part_constraint (fi, fi_result);
4453 rhs.var = anything_id;
4454 rhs.offset = 0;
4455 rhs.type = SCALAR;
4456 process_constraint (new_constraint (lhs, rhs));
4457 }
4458 return true;
4459 }
4460 /* printf-style functions may have hooks to set pointers to
4461 point to somewhere into the generated string. Leave them
4462 for a later exercise... */
4463 default:
4464 /* Fallthru to general call handling. */;
4465 }
4466
4467 return false;
4468 }
4469
4470 /* Create constraints for the call T. */
4471
4472 static void
4473 find_func_aliases_for_call (gimple t)
4474 {
4475 tree fndecl = gimple_call_fndecl (t);
4476 vec<ce_s> lhsc = vNULL;
4477 vec<ce_s> rhsc = vNULL;
4478 varinfo_t fi;
4479
4480 if (fndecl != NULL_TREE
4481 && DECL_BUILT_IN (fndecl)
4482 && find_func_aliases_for_builtin_call (t))
4483 return;
4484
4485 fi = get_fi_for_callee (t);
4486 if (!in_ipa_mode
4487 || (fndecl && !fi->is_fn_info))
4488 {
4489 vec<ce_s> rhsc = vNULL;
4490 int flags = gimple_call_flags (t);
4491
4492 /* Const functions can return their arguments and addresses
4493 of global memory but not of escaped memory. */
4494 if (flags & (ECF_CONST|ECF_NOVOPS))
4495 {
4496 if (gimple_call_lhs (t))
4497 handle_const_call (t, &rhsc);
4498 }
4499 /* Pure functions can return addresses in and of memory
4500 reachable from their arguments, but they are not an escape
4501 point for reachable memory of their arguments. */
4502 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4503 handle_pure_call (t, &rhsc);
4504 else
4505 handle_rhs_call (t, &rhsc);
4506 if (gimple_call_lhs (t))
4507 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4508 rhsc.release ();
4509 }
4510 else
4511 {
4512 tree lhsop;
4513 unsigned j;
4514
4515 /* Assign all the passed arguments to the appropriate incoming
4516 parameters of the function. */
4517 for (j = 0; j < gimple_call_num_args (t); j++)
4518 {
4519 struct constraint_expr lhs ;
4520 struct constraint_expr *rhsp;
4521 tree arg = gimple_call_arg (t, j);
4522
4523 get_constraint_for_rhs (arg, &rhsc);
4524 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4525 while (rhsc.length () != 0)
4526 {
4527 rhsp = &rhsc.last ();
4528 process_constraint (new_constraint (lhs, *rhsp));
4529 rhsc.pop ();
4530 }
4531 }
4532
4533 /* If we are returning a value, assign it to the result. */
4534 lhsop = gimple_call_lhs (t);
4535 if (lhsop)
4536 {
4537 struct constraint_expr rhs;
4538 struct constraint_expr *lhsp;
4539
4540 get_constraint_for (lhsop, &lhsc);
4541 rhs = get_function_part_constraint (fi, fi_result);
4542 if (fndecl
4543 && DECL_RESULT (fndecl)
4544 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4545 {
4546 vec<ce_s> tem = vNULL;
4547 tem.safe_push (rhs);
4548 do_deref (&tem);
4549 rhs = tem[0];
4550 tem.release ();
4551 }
4552 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4553 process_constraint (new_constraint (*lhsp, rhs));
4554 }
4555
4556 /* If we pass the result decl by reference, honor that. */
4557 if (lhsop
4558 && fndecl
4559 && DECL_RESULT (fndecl)
4560 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4561 {
4562 struct constraint_expr lhs;
4563 struct constraint_expr *rhsp;
4564
4565 get_constraint_for_address_of (lhsop, &rhsc);
4566 lhs = get_function_part_constraint (fi, fi_result);
4567 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4568 process_constraint (new_constraint (lhs, *rhsp));
4569 rhsc.release ();
4570 }
4571
4572 /* If we use a static chain, pass it along. */
4573 if (gimple_call_chain (t))
4574 {
4575 struct constraint_expr lhs;
4576 struct constraint_expr *rhsp;
4577
4578 get_constraint_for (gimple_call_chain (t), &rhsc);
4579 lhs = get_function_part_constraint (fi, fi_static_chain);
4580 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4581 process_constraint (new_constraint (lhs, *rhsp));
4582 }
4583 }
4584 }
4585
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.
   Handles PHIs, calls, assignments, returns and asms; anything with
   pointer operands that we cannot model precisely is treated as an
   escape point.  */

static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  /* NOTE(review): rhsc is drained inside the first iteration
	     of the outer loop; this relies on lhsc having a single
	     element (the PHI result is an SSA name) - confirm.  */
	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only set for single-operand RHSes; multi-operand
	 forms are handled via gimple_assign_rhs1/2/3 below.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
	    /* If the operation produces a floating point result then
	       assume the value is not produced to transfer a pointer.  */
	    ;
	  else if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* A conversion that does not create a pointer out of a
	       non-pointer, or a plain copy - transfer constraints.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* In IPA mode feed the returned value into the function's
	     result variable instead of letting it escape.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      /* Note this inner lhsc intentionally shadows the outer
		 vector and is released before leaving the scope.  */
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4807
4808
4809 /* Create a constraint adding to the clobber set of FI the memory
4810 pointed to by PTR. */
4811
4812 static void
4813 process_ipa_clobber (varinfo_t fi, tree ptr)
4814 {
4815 vec<ce_s> ptrc = vNULL;
4816 struct constraint_expr *c, lhs;
4817 unsigned i;
4818 get_constraint_for_rhs (ptr, &ptrc);
4819 lhs = get_function_part_constraint (fi, fi_clobbers);
4820 FOR_EACH_VEC_ELT (ptrc, i, c)
4821 process_constraint (new_constraint (lhs, *c));
4822 ptrc.release ();
4823 }
4824
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  Stores feed the current function's
   fi_clobbers variable, loads its fi_uses variable; calls propagate
   the callee's clobbers/uses into the caller's.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip handled components down to the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only record stores to non-automatic or indirectly referenced
	 memory - automatics do not escape to callers.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  /* This local struct lhsc shadows the outer vector lhsc.  */
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Same filtering as for stores above: skip function-local
	 automatics.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy takes (src, dest), all others (dest, src).  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants are not memory.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee - be maximally conservative.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
5102
5103
5104 /* Find the first varinfo in the same variable as START that overlaps with
5105 OFFSET. Return NULL if we can't find one. */
5106
5107 static varinfo_t
5108 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5109 {
5110 /* If the offset is outside of the variable, bail out. */
5111 if (offset >= start->fullsize)
5112 return NULL;
5113
5114 /* If we cannot reach offset from start, lookup the first field
5115 and start from there. */
5116 if (start->offset > offset)
5117 start = get_varinfo (start->head);
5118
5119 while (start)
5120 {
5121 /* We may not find a variable in the field list with the actual
5122 offset when when we have glommed a structure to a variable.
5123 In that case, however, offset should still be within the size
5124 of the variable. */
5125 if (offset >= start->offset
5126 && (offset - start->offset) < start->size)
5127 return start;
5128
5129 start = vi_next (start);
5130 }
5131
5132 return NULL;
5133 }
5134
5135 /* Find the first varinfo in the same variable as START that overlaps with
5136 OFFSET. If there is no such varinfo the varinfo directly preceding
5137 OFFSET is returned. */
5138
5139 static varinfo_t
5140 first_or_preceding_vi_for_offset (varinfo_t start,
5141 unsigned HOST_WIDE_INT offset)
5142 {
5143 /* If we cannot reach offset from start, lookup the first field
5144 and start from there. */
5145 if (start->offset > offset)
5146 start = get_varinfo (start->head);
5147
5148 /* We may not find a variable in the field list with the actual
5149 offset when when we have glommed a structure to a variable.
5150 In that case, however, offset should still be within the size
5151 of the variable.
5152 If we got beyond the offset we look for return the field
5153 directly preceding offset which may be the last field. */
5154 while (start->next
5155 && offset >= start->offset
5156 && !((offset - start->offset) < start->size))
5157 start = vi_next (start);
5158
5159 return start;
5160 }
5161
5162
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field's DECL_SIZE is unknown (in which case SIZE is -1).  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers.  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5185
5186
5187 /* qsort comparison function for two fieldoff's PA and PB */
5188
5189 static int
5190 fieldoff_compare (const void *pa, const void *pb)
5191 {
5192 const fieldoff_s *foa = (const fieldoff_s *)pa;
5193 const fieldoff_s *fob = (const fieldoff_s *)pb;
5194 unsigned HOST_WIDE_INT foasize, fobsize;
5195
5196 if (foa->offset < fob->offset)
5197 return -1;
5198 else if (foa->offset > fob->offset)
5199 return 1;
5200
5201 foasize = foa->size;
5202 fobsize = fob->size;
5203 if (foasize < fobsize)
5204 return -1;
5205 else if (foasize > fobsize)
5206 return 1;
5207 return 0;
5208 }
5209
/* Sort a fieldstack according to the field offset and sizes.  Uses
   fieldoff_compare, which orders by offset first and size second.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5216
5217 /* Return true if T is a type that can have subvars. */
5218
5219 static inline bool
5220 type_can_have_subvars (const_tree t)
5221 {
5222 /* Aggregates without overlapping fields can have subvars. */
5223 return TREE_CODE (t) == RECORD_TYPE;
5224 }
5225
5226 /* Return true if V is a tree that we can have subvars for.
5227 Normally, this is any aggregate type. Also complex
5228 types which are not gimple registers can have subvars. */
5229
5230 static inline bool
5231 var_can_have_subvars (const_tree v)
5232 {
5233 /* Volatile variables should never have subvars. */
5234 if (TREE_THIS_VOLATILE (v))
5235 return false;
5236
5237 /* Non decls or memory tags can never have subvars. */
5238 if (!DECL_P (v))
5239 return false;
5240
5241 return type_can_have_subvars (TREE_TYPE (v));
5242 }
5243
5244 /* Return true if T is a type that does contain pointers. */
5245
5246 static bool
5247 type_must_have_pointers (tree type)
5248 {
5249 if (POINTER_TYPE_P (type))
5250 return true;
5251
5252 if (TREE_CODE (type) == ARRAY_TYPE)
5253 return type_must_have_pointers (TREE_TYPE (type));
5254
5255 /* A function or method can have pointers as arguments, so track
5256 those separately. */
5257 if (TREE_CODE (type) == FUNCTION_TYPE
5258 || TREE_CODE (type) == METHOD_TYPE)
5259 return true;
5260
5261 return false;
5262 }
5263
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5269
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs can have subvars (see type_can_have_subvars).  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields that cannot themselves be decomposed are pushed as a
	   whole; decomposable fields are recursed into.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       This artificial entry starts at offset 0 and spans
	       offset + foff bits.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Grow the previous entry to also cover this field.  */
		pair->size += tree_to_hwi (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_hwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5370
5371 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5372 if it is a varargs function. */
5373
5374 static unsigned int
5375 count_num_arguments (tree decl, bool *is_varargs)
5376 {
5377 unsigned int num = 0;
5378 tree t;
5379
5380 /* Capture named arguments for K&R functions. They do not
5381 have a prototype and thus no TYPE_ARG_TYPES. */
5382 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5383 ++num;
5384
5385 /* Check if the function has variadic arguments. */
5386 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5387 if (TREE_VALUE (t) == void_type_node)
5388 break;
5389 if (!t)
5390 *is_varargs = true;
5391
5392 return num;
5393 }
5394
/* Create the function-info variable node for DECL, using NAME, and
   return the varinfo we've created for the function.  */
5397
static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  The function variable itself covers
     the fi_* special offsets followed by one slot per parameter.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  Subvariables are chained via
     prev_vi->next in order of strictly increasing offset.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    asprintf (&tempname, "%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    clobbervi = new_var_info (NULL, newname);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;
    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi->id;
    prev_vi = clobbervi;

    asprintf (&tempname, "%s.use", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    usevi = new_var_info (NULL, newname);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;
    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi->id;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      /* Prefer the RESULT_DECL if the function has one.  */
      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  ARG may run out before
     NUM_ARGS for K&R-style declarations; then DECL is used as a
     placeholder decl.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5572
5573
5574 /* Return true if FIELDSTACK contains fields that overlap.
5575 FIELDSTACK is assumed to be sorted by offset. */
5576
5577 static bool
5578 check_for_overlaps (vec<fieldoff_s> fieldstack)
5579 {
5580 fieldoff_s *fo = NULL;
5581 unsigned int i;
5582 HOST_WIDE_INT lastoffset = -1;
5583
5584 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5585 {
5586 if (fo->offset == lastoffset)
5587 return true;
5588 lastoffset = fo->offset;
5589 }
5590 return false;
5591 }
5592
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single
     conservative varinfo covering everything.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Reject field-sensitivity if any field has unknown size or a
	 negative (unrepresentable) offset.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_hwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained together via
     the next/head links.  */
  vi = new_var_info (decl, name);
  vi->fullsize = tree_to_hwi (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = "NULL";
      char *tempname;

      /* Only build decorated names when dumping, they are for
	 diagnostics only.  */
      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  fieldstack.release ();

  return vi;
}
5710
/* Create varinfo(s) for DECL named NAME, register them in the
   tree-to-varinfo map and add initial constraints for global
   variables.  Returns the id of the head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->definition)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5784
/* Print out the points-to solution for VAR to FILE.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  /* The solution lives on the representative of the unification class.  */
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
}
5808
/* Print the points-to solution for VAR to stdout.  Convenience
   entry point for use from the debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
5816
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
	 reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  /* The fake decl stands for the object the restrict argument
	     is guaranteed not to alias with anything else.  */
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Constrain the argument to point to the representative:
	     ARG = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  for (; vi; vi = vi_next (vi))
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  /* Ordinary argument: each subvariable that may hold
	     pointers points to NONLOCAL.  */
	  for (; p; p = vi_next (p))
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
5898
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  /* Cached bitmap_hash of PT_VARS, computed once at insertion.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Shared_bitmap hashtable helpers.  */

struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
{
  typedef shared_bitmap_info value_type;
  typedef shared_bitmap_info compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash function for a shared_bitmap_info_t.  */

inline hashval_t
shared_bitmap_hasher::hash (const value_type *bi)
{
  /* Use the cached hash code instead of rehashing the bitmap.  */
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  */

inline bool
shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}

/* Shared_bitmap hashtable.  */

static hash_table <shared_bitmap_hasher> shared_bitmap_table;
5938
5939 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5940 existing instance if there is one, NULL otherwise. */
5941
5942 static bitmap
5943 shared_bitmap_lookup (bitmap pt_vars)
5944 {
5945 shared_bitmap_info **slot;
5946 struct shared_bitmap_info sbi;
5947
5948 sbi.pt_vars = pt_vars;
5949 sbi.hashcode = bitmap_hash (pt_vars);
5950
5951 slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
5952 NO_INSERT);
5953 if (!slot)
5954 return NULL;
5955 else
5956 return (*slot)->pt_vars;
5957 }
5958
5959
5960 /* Add a bitmap to the shared bitmap hashtable. */
5961
5962 static void
5963 shared_bitmap_add (bitmap pt_vars)
5964 {
5965 shared_bitmap_info **slot;
5966 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5967
5968 sbi->pt_vars = pt_vars;
5969 sbi->hashcode = bitmap_hash (pt_vars);
5970
5971 slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
5972 gcc_assert (!*slot);
5973 *slot = sbi;
5974 }
5975
5976
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, and record in PT whether the set contains global variables.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
6012
6013
/* Compute (or fetch the cached) points-to solution for ORIG_VI and
   return it by value.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set; register and keep it.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An equal set already exists; share it and drop ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6094
/* Given a pointer variable P, fill in its points-to set.  */

static void
find_what_p_points_to (tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  /* Nothing to fill in if we never created a varinfo for this tree.  */
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (vi);
}
6119
6120
/* Query statistics for points-to solutions.  Each pair counts the
   may-alias vs. no-alias answers of the corresponding query.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;

/* Dump points-to query statistics to stream S.  */

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
6147
6148
/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  /* Clear all flags and the vars bitmap, then flag ANYTHING.  */
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
6158
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
6171
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  /* Points-to sets are keyed by DECL_PT_UID, not DECL_UID.  */
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_global = is_global_var (var);
}
6182
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  /* ANYTHING subsumes everything else; reset to the conservative
     default and stop.  */
  if (dest->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6211
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED solution, check if that
     is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6237
6238 /* Return true if the points-to solution *PT only point to a single var, and
6239 return the var uid in *UID. */
6240
6241 bool
6242 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6243 {
6244 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6245 || pt->null || pt->vars == NULL
6246 || !bitmap_single_bit_set_p (pt->vars))
6247 return false;
6248
6249 *uid = bitmap_first_set_bit (pt->vars);
6250 return true;
6251 }
6252
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* Escaped memory may transitively reach globals; defer to the
     ESCAPED solutions.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ??? This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
6277
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED solution, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6307
6308 bool
6309 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6310 {
6311 bool res = pt_solution_includes_1 (pt, decl);
6312 if (res)
6313 ++pta_stats.pt_solution_includes_may_alias;
6314 else
6315 ++pta_stats.pt_solution_includes_no_alias;
6316 return res;
6317 }
6318
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6379
6380 bool
6381 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6382 {
6383 bool res = pt_solutions_intersect_1 (pt1, pt2);
6384 if (res)
6385 ++pta_stats.pt_solutions_intersect_may_alias;
6386 else
6387 ++pta_stats.pt_solutions_intersect_no_alias;
6388 return res;
6389 }
6390
6391
/* Dump points-to information to OUTFILE.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Index 0 is the reserved NULL varinfo, so start at 1.  */
  for (i = 1; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
6425
6426
/* Dump points-to information to stderr; convenience entry point
   intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6434
6435
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, and set up
   the base constraints that relate them.  The variables are created in
   a fixed order so that their ids match the predefined *_id constants
   (asserted below).  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  /* Note: unlike the other artificial vars, ESCAPED is NOT marked
     special — its solution participates in normal solving.  */
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6607
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the constraint and variable vectors, the tree->varinfo maps,
   and the always-existing base variables.  Shared by the intra-
   procedural (compute_points_to_sets) and IPA (ipa_pta_execute)
   drivers; delete_points_to_sets undoes this.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table.create (511);
  /* Must run after varmap/constraints exist — it pushes the special
     NULL/ANYTHING/... variables and their base constraints.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
6637
6638 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6639 predecessor edges. */
6640
6641 static void
6642 remove_preds_and_fake_succs (constraint_graph_t graph)
6643 {
6644 unsigned int i;
6645
6646 /* Clear the implicit ref and address nodes from the successor
6647 lists. */
6648 for (i = 1; i < FIRST_REF_NODE; i++)
6649 {
6650 if (graph->succs[i])
6651 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6652 FIRST_REF_NODE * 2);
6653 }
6654
6655 /* Free the successor list for the non-ref nodes. */
6656 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6657 {
6658 if (graph->succs[i])
6659 BITMAP_FREE (graph->succs[i]);
6660 }
6661
6662 /* Now reallocate the size of the successor list as, and blow away
6663 the predecessor bitmaps. */
6664 graph->size = varmap.length ();
6665 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6666
6667 free (graph->implicit_preds);
6668 graph->implicit_preds = NULL;
6669 free (graph->preds);
6670 graph->preds = NULL;
6671 bitmap_obstack_release (&predbitmap_obstack);
6672 }
6673
/* Solve the constraint set.  Pipeline: build the predecessor graph,
   perform offline variable substitution, rewrite the constraints onto
   the unified variables, build the successor graph, and finally run
   the iterative solver.  The stages must run in exactly this order.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: one node per variable plus one REF node
     per variable for the offline phases.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6746
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  Builds the
   constraint set from all statements, solves it, and then materializes
   the ESCAPED set, the per-SSA-name points-to sets and the per-call
   use/clobber sets into cfun->gimple_df.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Create variable infos for incoming parameters etc.  */
  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry no pointer values of their own.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi
	&& vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  /* Const calls use nothing; otherwise prefer the precise
	     solution of the call's use variable if we have one.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Same scheme for the clobber set, except pure/novops calls
	     clobber nothing either.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6876
6877
/* Delete created points-to sets.  Releases everything allocated by
   init_alias_vars and the solver; the computed pt_solutions stored in
   gimple_df survive (their bitmaps live elsewhere).  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  shared_bitmap_table.dispose ();
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Release the per-node complex-constraint vectors before freeing
     the arrays that hold them.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);

  obstack_free (&fake_var_decl_obstack, NULL);

  pointer_map_destroy (final_solutions);
  obstack_free (&final_solutions_obstack, NULL);
}
6915
6916
6917 /* Compute points-to information for every SSA_NAME pointer in the
6918 current function and compute the transitive closure of escaped
6919 variables to re-initialize the call-clobber states of local variables. */
6920
6921 unsigned int
6922 compute_may_aliases (void)
6923 {
6924 if (cfun->gimple_df->ipa_pta)
6925 {
6926 if (dump_file)
6927 {
6928 fprintf (dump_file, "\nNot re-computing points-to information "
6929 "because IPA points-to information is available.\n\n");
6930
6931 /* But still dump what we have remaining it. */
6932 dump_alias_info (dump_file);
6933 }
6934
6935 return 0;
6936 }
6937
6938 /* For each pointer P_i, determine the sets of variables that P_i may
6939 point-to. Compute the reachability set of escaped and call-used
6940 variables. */
6941 compute_points_to_sets ();
6942
6943 /* Debugging dumps. */
6944 if (dump_file)
6945 dump_alias_info (dump_file);
6946
6947 /* Deallocate memory used by aliasing data structures and the internal
6948 points-to solution. */
6949 delete_points_to_sets ();
6950
6951 gcc_assert (!need_ssa_update_p (cfun));
6952
6953 return 0;
6954 }
6955
/* Gate: run tree points-to analysis only when enabled via
   -ftree-pta.  */

static bool
gate_tree_pta (void)
{
  return flag_tree_pta;
}
6961
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute function; all
   the work happens in the pass-manager TODO processing.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_alias

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
7001
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Identical in structure to pass_build_alias but
   registered under the name "ealias" for the early pass pipeline.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_ealias

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
7041
7042
/* Return true if we should execute IPA PTA.  */
static bool
gate_ipa_pta (void)
{
  return (optimize
	  && flag_ipa_pta
	  /* Don't bother doing anything if the program has errors.  */
	  && !seen_error ());
}

/* IPA PTA solution for ESCAPED, filled in by ipa_pta_execute and
   consulted by the pt_solution query functions above.  The aggregate
   initializer sets the first flag (presumably pt_solution.anything —
   NOTE(review): confirm against the struct definition) so that before
   IPA PTA runs the set conservatively means "anything".  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
7056
7057 /* Associate node with varinfo DATA. Worker for
7058 cgraph_for_node_and_aliases. */
7059 static bool
7060 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7061 {
7062 if ((node->alias || node->thunk.thunk_p)
7063 && node->analyzed)
7064 insert_vi_for_tree (node->decl, (varinfo_t)data);
7065 return false;
7066 }
7067
/* Execute the driver for IPA PTA.  Builds one constraint system for
   the whole translation unit (functions, globals and initializers),
   solves it once, then distributes the resulting points-to, call-use
   and call-clobber sets into each function's gimple_df.  Sets the
   per-function ipa_pta flag so local PTA will not overwrite the
   result.  Always returns 0.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  First create function infos so calls can
     be resolved to varinfos in the second walk below.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;
      cgraph_get_body (node);

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl));
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      /* Analyzed aliases are handled via their target.  */
      if (var->alias && var->analyzed)
	continue;

      get_vi_for_tree (var->decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  /* Remember where this function's constraints start for dumping.  */
  from = constraints.length ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->used_from_other_partition
	  || node->externally_visible
	  || node->force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  /* ESCAPED = <result of this function>.  */
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.
	 These come from the callee's fi_uses/fi_clobbers sub-variables.  */
      fi = lookup_vi_for_tree (node->decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  A decl
		 without fn_info means we had no body for it above.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      /* Union the fi_uses/fi_clobbers of every possible
			 callee recorded in the solution bitmap.  */
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Record that this function's sets come from IPA PTA so the
	 local pass will not recompute them.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7398
/* Pass registration for the IPA points-to analysis driver above.  */

namespace {

const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_pta (); }
  unsigned int execute () { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}