/* Tree based points-to analysis
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "obstack.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "flags.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stmt.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pretty-print.h"
#include "gimple-walk.h"

/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

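   As a purely illustrative example, the statements

     p = &a;
     x = *p;
     *p = x;

   give rise to an ADDRESSOF expression for &a on the RHS of the first
   constraint, a DEREF expression for *p on the RHS of the second, and
   a DEREF expression for *p on the LHS of the third.
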
   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.
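
   As a small worked example of steps 3-6 (illustrative only): given
   the constraints p = &a and q = p, step 3 puts a into Sol(p), step 5
   adds a copy edge from p to q, and step 6 propagates Sol(p) along
   that edge, so the final solution has Sol(q) = {a} as well.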

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */

/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it to is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.  This is
   also needed to fix the pt_solution_includes_global predicate
   (and thus ptr_deref_may_alias_global_p).

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution
   propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;

struct variable_info
{
  /* ID of this variable.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures. */
static pool_allocator<variable_info> variable_info_pool
  ("Variable info pool", 30);

/* Map varinfo to final pt_solution. */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;

/* Return the varmap element N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}

/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}

/* Static IDs for the special variables.  Variable ID zero is unused;
   it serves as the terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };

/* Return a new variable info structure for a variable named NAME
   associated with tree T (NULL_TREE for artificial variables).
   Append it to the vector of variable info structures.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  unsigned index = varmap.length ();
  varinfo_t ret = variable_info_pool.allocate ();

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_restrict_var = false;
  ret->ruid = 0;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = NULL;
  ret->next = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}


/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
static hash_map<gimple, varinfo_t> *call_stmt_vars;

/* Lookup or create the variable for the call statement CALL.  */

static varinfo_t
get_call_vi (gcall *call)
{
  varinfo_t vi, vi2;

  bool existed;
  varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
  if (existed)
    return *slot_p;

  vi = new_var_info (NULL_TREE, "CALLUSED");
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;

  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;

  vi->next = vi2->id;

  *slot_p = vi;
  return vi;
}

/* Lookup the variable for the call statement CALL representing
   the uses.  Returns NULL if there is nothing special about this call.  */

static varinfo_t
lookup_call_use_vi (gcall *call)
{
  varinfo_t *slot_p = call_stmt_vars->get (call);
  if (slot_p)
    return *slot_p;

  return NULL;
}

/* Lookup the variable for the call statement CALL representing
   the clobbers.  Returns NULL if there is nothing special about this call.  */

static varinfo_t
lookup_call_clobber_vi (gcall *call)
{
  varinfo_t uses = lookup_call_use_vi (call);
  if (!uses)
    return NULL;

  return vi_next (uses);
}

/* Lookup or create the variable for the call statement CALL representing
   the uses.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}

/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (get_call_vi (call));
}


typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);

/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
static pool_allocator<constraint> constraint_pool ("Constraint pool", 30);

/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
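
/* For illustration: with N variables in varmap, node i (i < N) stands
   for variable i itself, while node FIRST_REF_NODE + i stands for the
   dereference *i, so the ref nodes occupy ids N through
   LAST_REF_NODE (2N - 1).  */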

/* Return the representative node for NODE, if NODE has been unioned
   with another NODE.
   This function performs path compression along the way to finding
   the representative.  */

static unsigned int
find (unsigned int node)
{
  gcc_checking_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}

/* Union the FROM and TO nodes into the TO node.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_checking_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}
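
/* For illustration: after unite (1, 2) and unite (1, 3), rep[2] and
   rep[3] are both 1, so find (2) == find (3) == 1.  If instead rep[3]
   were 2 and rep[2] were 1, find (3) would rewrite rep[3] to 1 on the
   way up (path compression).  */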

/* Create a new constraint consisting of LHS and RHS expressions.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = constraint_pool.allocate ();
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE.  */

static void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
  if (c->lhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
  if (c->rhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
}


void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);

/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}

/* Print out all constraints to FILE.  */

static void
dump_constraints (FILE *file, int from)
{
  int i;
  constraint_t c;
  for (i = from; constraints.iterate (i, &c); i++)
    if (c)
      {
	dump_constraint (file, c);
	fprintf (file, "\n");
      }
}

/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}

/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}

/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}

/* SOLVER FUNCTIONS

   The solver is a simple worklist solver that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   } */

/* Return true if two constraint expressions A and B are equal.  */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
}

/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering.  */

static bool
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
{
  if (a.type == b.type)
    {
      if (a.var == b.var)
	return a.offset < b.offset;
      else
	return a.var < b.var;
    }
  else
    return a.type < b.type;
}

/* Return true if constraint A is less than constraint B.  This is just
   arbitrary, but consistent, in order to give them an ordering.  */

static bool
constraint_less (const constraint_t &a, const constraint_t &b)
{
  if (constraint_expr_less (a->lhs, b->lhs))
    return true;
  else if (constraint_expr_less (b->lhs, a->lhs))
    return false;
  else
    return constraint_expr_less (a->rhs, b->rhs);
}

/* Return true if two constraints A and B are equal.  */

static bool
constraint_equal (struct constraint a, struct constraint b)
{
  return constraint_expr_equal (a.lhs, b.lhs)
    && constraint_expr_equal (a.rhs, b.rhs);
}


/* Find a constraint LOOKFOR in the sorted constraint vector VEC.  */

static constraint_t
constraint_vec_find (vec<constraint_t> vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (!vec.exists ())
    return NULL;

  place = vec.lower_bound (&lookfor, constraint_less);
  if (place >= vec.length ())
    return NULL;
  found = vec[place];
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}

/* Union two constraint vectors, TO and FROM.  Put the result in TO.
   Returns true if the TO set changed.  */

static bool
constraint_set_union (vec<constraint_t> *to,
		      vec<constraint_t> *from)
{
  int i;
  constraint_t c;
  bool any_change = false;

  FOR_EACH_VEC_ELT (*from, i, c)
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  unsigned int place = to->lower_bound (c, constraint_less);
	  to->safe_insert (place, c);
	  any_change = true;
	}
    }
  return any_change;
}

/* Expands the solution in SET to all sub-fields of variables included.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the heads of the variables whose
     sub-fields we need to add.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
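
/* For illustration, using the struct f example from the overview:
   expanding the set {foo.a} first records the head foo.a, then walks
   the sub-variable chain and adds foo.b, and finally ors the original
   set back in, yielding {foo.a, foo.b}.  */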

/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
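
/* For illustration, again with the struct f example: unioning
   DELTA = {foo.a} into TO with INC = 32 looks up the field at offset
   0 + 32 and therefore sets the bit for foo.b in TO rather than the
   one for foo.a.  */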

/* Insert constraint C into the list of complex constraints for graph
   node VAR.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  vec<constraint_t> complex = graph->complex[var];
  unsigned int place = complex.lower_bound (c, constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= complex.length ()
      || !constraint_equal (*c, *complex[place]))
    graph->complex[var].safe_insert (place, c);
}


/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from the FROM node into the TO node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM or *FROM = a, or an offsetted constraint, which is
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}


/* Remove edges involving NODE from GRAPH.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}

/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}


/* Add an implicit predecessor graph edge to GRAPH, going from TO to
   FROM if it doesn't exist in the graph already.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (to == from)
    return;

  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}

/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}

/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}


/* Initialize the constraint graph structure to contain SIZE nodes.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}

/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
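
/* For illustration, with ordinary (non-special) variables x, y and z:
   for the constraints x = &y and z = x the code above records
   points_to[x] = {y}, pointed_by[y] = {x} and the implicit edge
   *x <- y for the address-of, plus the predecessor edge z <- x and
   the implicit edge *z <- *x for the copy.  */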

/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}


/* Changed variables on the last iteration.  */
static bitmap changed;

/* Strongly Connected Component visitation info.  */

struct scc_info
{
  sbitmap visited;
  sbitmap deleted;
  unsigned int *dfs;
  unsigned int *node_mapping;
  int current_index;
  vec<unsigned> scc_stack;
};


/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}

/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}

/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};


/* Initialize and return a topological info structure.  */

static struct topo_info *
init_topo_info (void)
{
  size_t size = graph->size;
  struct topo_info *ti = XNEW (struct topo_info);
  ti->visited = sbitmap_alloc (size);
  bitmap_clear (ti->visited);
  ti->topo_order.create (1);
  return ti;
}


/* Free the topological sort info pointed to by TI.  */

static void
free_topo_info (struct topo_info *ti)
{
  sbitmap_free (ti->visited);
  ti->topo_order.release ();
  free (ti);
}

/* Visit the graph in topological order, and store the order in the
   topo_info structure.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  ti->topo_order.safe_push (n);
}

/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}

/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}

/* Handle a non-simple constraint (simple meaning it requires no
   iteration), IE *x = &y, x = *y, *x = y, and x = y with offsets
   involved.  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
		       bitmap *expanded_delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  gcc_unreachable ();
	}
      else
	{
	  /* *x = y.  */
	  do_ds_constraint (c, delta, expanded_delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y.  */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta, expanded_delta);
    }
  else
    {
      bitmap tmp;
      bool flag = false;

      gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
			   && c->rhs.offset != 0 && c->lhs.offset == 0);
      tmp = get_varinfo (c->lhs.var)->solution;

      flag = set_union_with_increment (tmp, delta, c->rhs.offset,
				       expanded_delta);

      if (flag)
	bitmap_set_bit (changed, c->lhs.var);
    }
}

/* Initialize and return a new SCC info structure.  */

static struct scc_info *
init_scc_info (size_t size)
{
  struct scc_info *si = XNEW (struct scc_info);
  size_t i;

  si->current_index = 0;
  si->visited = sbitmap_alloc (size);
  bitmap_clear (si->visited);
  si->deleted = sbitmap_alloc (size);
  bitmap_clear (si->deleted);
  si->node_mapping = XNEWVEC (unsigned int, size);
  si->dfs = XCNEWVEC (unsigned int, size);

  for (i = 0; i < size; i++)
    si->node_mapping[i] = i;

  si->scc_stack.create (1);
  return si;
}

/* Free an SCC info structure pointed to by SI.  */

static void
free_scc_info (struct scc_info *si)
{
  sbitmap_free (si->visited);
  sbitmap_free (si->deleted);
  free (si->node_mapping);
  free (si->dfs);
  si->scc_stack.release ();
  free (si);
}


1877 /* Find indirect cycles in GRAPH that occur, using strongly connected
1878 components, and note them in the indirect cycles map.
1879
1880 This technique comes from Ben Hardekopf and Calvin Lin,
1881 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1882 Lines of Code", submitted to PLDI 2007. */
1883
1884 static void
1885 find_indirect_cycles (constraint_graph_t graph)
1886 {
1887 unsigned int i;
1888 unsigned int size = graph->size;
1889 struct scc_info *si = init_scc_info (size);
1890
1891 for (i = 0; i < MIN (LAST_REF_NODE, size); i++)
1892 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1893 scc_visit (graph, si, i);
1894
1895 free_scc_info (si);
1896 }
1897
1898 /* Compute a topological ordering for GRAPH, and store the result in the
1899 topo_info structure TI. */
1900
1901 static void
1902 compute_topo_order (constraint_graph_t graph,
1903 struct topo_info *ti)
1904 {
1905 unsigned int i;
1906 unsigned int size = graph->size;
1907
1908 for (i = 0; i != size; ++i)
1909 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1910 topo_visit (graph, ti, i);
1911 }
1912
1913 /* Structure used for hash value numbering of pointer equivalence
1914 classes. */
1915
1916 typedef struct equiv_class_label
1917 {
1918 hashval_t hashcode;
1919 unsigned int equivalence_class;
1920 bitmap labels;
1921 } *equiv_class_label_t;
1922 typedef const struct equiv_class_label *const_equiv_class_label_t;
1923
1924 /* Equiv_class_label hashtable helpers. */
1925
1926 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1927 {
1928 static inline hashval_t hash (const equiv_class_label *);
1929 static inline bool equal (const equiv_class_label *,
1930 const equiv_class_label *);
1931 };
1932
1933 /* Hash function for an equiv_class_label_t. */
1934
1935 inline hashval_t
1936 equiv_class_hasher::hash (const equiv_class_label *ecl)
1937 {
1938 return ecl->hashcode;
1939 }
1940
1941 /* Equality function for two equiv_class_label_t's. */
1942
1943 inline bool
1944 equiv_class_hasher::equal (const equiv_class_label *eql1,
1945 const equiv_class_label *eql2)
1946 {
1947 return (eql1->hashcode == eql2->hashcode
1948 && bitmap_equal_p (eql1->labels, eql2->labels));
1949 }
1950
1951 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1952 classes. */
1953 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1954
1955 /* A hashtable for mapping a bitmap of labels->location equivalence
1956 classes. */
1957 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1958
1959 /* Look up the equivalence class for the bitmap of LABELS in TABLE.
1960 If it does not exist, insert a new entry for LABELS with a zero
1961 equivalence class, and return that entry. */
1962
1963 static equiv_class_label *
1964 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1965 bitmap labels)
1966 {
1967 equiv_class_label **slot;
1968 equiv_class_label ecl;
1969
1970 ecl.labels = labels;
1971 ecl.hashcode = bitmap_hash (labels);
1972 slot = table->find_slot (&ecl, INSERT);
1973 if (!*slot)
1974 {
1975 *slot = XNEW (struct equiv_class_label);
1976 (*slot)->labels = labels;
1977 (*slot)->hashcode = ecl.hashcode;
1978 (*slot)->equivalence_class = 0;
1979 }
1980
1981 return *slot;
1982 }
1983
1984 /* Perform offline variable substitution.
1985
1986 This is a worst case quadratic time way of identifying variables
1987 that must have equivalent points-to sets, including those caused by
1988 static cycles, and single entry subgraphs, in the constraint graph.
1989
1990 The technique is described in "Exploiting Pointer and Location
1991 Equivalence to Optimize Pointer Analysis", in the 14th International
1992 Static Analysis Symposium (SAS), August 2007. It is known as the
1993 "HU" algorithm, and is equivalent to value numbering the collapsed
1994 constraint graph including evaluating unions.
1995
1996 The general method of finding equivalence classes is as follows:
1997 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1998 Initialize all non-REF nodes to be direct nodes.
1999 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
2000 variable}.
2001 For each constraint containing a dereference, we do the same
2002 thing.
2003
2004 We then compute SCC's in the graph and unify nodes in the same SCC,
2005 including pts sets.
2006
2007 For each non-collapsed node x:
2008 Visit all unvisited explicit incoming edges.
2009 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
2010 where y->x.
2011 Lookup the equivalence class for pts(x).
2012 If we found one, equivalence_class(x) = found class.
2013 Otherwise, equivalence_class(x) = new class, and new_class is
2014 added to the lookup table.
2015
2016 All direct nodes with the same equivalence class can be replaced
2017 with a single representative node.
2018 All unlabeled nodes (label == 0) are not pointers and all edges
2019 involving them can be eliminated.
2020 We perform these optimizations during rewrite_constraints.
2021
2022 In addition to pointer equivalence class finding, we also perform
2023 location equivalence class finding. This is the set of variables
2024 that always appear together in points-to sets. We use this to
2025 compress the size of the points-to sets. */
2026
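/* A small worked example (a sketch, not from the original sources):
   given

     b = &x;   a = b;   c = b;

   b receives a fresh points-to bit, while a and c each union in only
   pts(b). Their bitmaps hash equal, so a and c get the same pointer
   equivalence label; being direct nodes, one can stand as the
   representative for both when the constraints are rewritten. A node
   whose label remains 0 never receives any points-to bits and is
   thereby proven to be a non-pointer. */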
2027 /* Current maximum pointer equivalence class id. */
2028 static int pointer_equiv_class;
2029
2030 /* Current maximum location equivalence class id. */
2031 static int location_equiv_class;
2032
2033 /* Recursive routine to find strongly connected components in GRAPH
2034 and label its nodes with DFS numbers. */
2035
2036 static void
2037 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2038 {
2039 unsigned int i;
2040 bitmap_iterator bi;
2041 unsigned int my_dfs;
2042
2043 gcc_checking_assert (si->node_mapping[n] == n);
2044 bitmap_set_bit (si->visited, n);
2045 si->dfs[n] = si->current_index++;
2046 my_dfs = si->dfs[n];
2047
2048 /* Visit all the explicit predecessor edges. */
2049 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2050 {
2051 unsigned int w = si->node_mapping[i];
2052
2053 if (bitmap_bit_p (si->deleted, w))
2054 continue;
2055
2056 if (!bitmap_bit_p (si->visited, w))
2057 condense_visit (graph, si, w);
2058
2059 unsigned int t = si->node_mapping[w];
2060 gcc_checking_assert (si->node_mapping[n] == n);
2061 if (si->dfs[t] < si->dfs[n])
2062 si->dfs[n] = si->dfs[t];
2063 }
2064
2065 /* Visit all the implicit predecessors. */
2066 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2067 {
2068 unsigned int w = si->node_mapping[i];
2069
2070 if (bitmap_bit_p (si->deleted, w))
2071 continue;
2072
2073 if (!bitmap_bit_p (si->visited, w))
2074 condense_visit (graph, si, w);
2075
2076 unsigned int t = si->node_mapping[w];
2077 gcc_assert (si->node_mapping[n] == n);
2078 if (si->dfs[t] < si->dfs[n])
2079 si->dfs[n] = si->dfs[t];
2080 }
2081
2082 /* See if any components have been identified. */
2083 if (si->dfs[n] == my_dfs)
2084 {
2085 while (si->scc_stack.length () != 0
2086 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2087 {
2088 unsigned int w = si->scc_stack.pop ();
2089 si->node_mapping[w] = n;
2090
2091 if (!bitmap_bit_p (graph->direct_nodes, w))
2092 bitmap_clear_bit (graph->direct_nodes, n);
2093
2094 /* Unify our nodes. */
2095 if (graph->preds[w])
2096 {
2097 if (!graph->preds[n])
2098 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2099 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2100 }
2101 if (graph->implicit_preds[w])
2102 {
2103 if (!graph->implicit_preds[n])
2104 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2105 bitmap_ior_into (graph->implicit_preds[n],
2106 graph->implicit_preds[w]);
2107 }
2108 if (graph->points_to[w])
2109 {
2110 if (!graph->points_to[n])
2111 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2112 bitmap_ior_into (graph->points_to[n],
2113 graph->points_to[w]);
2114 }
2115 }
2116 bitmap_set_bit (si->deleted, n);
2117 }
2118 else
2119 si->scc_stack.safe_push (n);
2120 }
2121
2122 /* Label pointer equivalences.
2123
2124 This performs a value numbering of the constraint graph to
2125 discover which variables will always have the same points-to sets
2126 under the current set of constraints.
2127
2128 The way it value numbers is to store the set of points-to bits
2129 generated by the constraints and graph edges. This is just used as a
2130 hash and equality comparison. The *actual set of points-to bits* is
2131 completely irrelevant, in that we don't care about being able to
2132 extract them later.
2133
2134 The equality values (currently bitmaps) just have to satisfy a few
2135 constraints, the main ones being:
2136 1. The combining operation must be order independent.
2137 2. The end result of a given set of operations must be unique iff the
2138 combination of input values is unique.
2139 3. The values must be hashable. */
2140
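/* For instance (an illustrative reading of the constraints above):
   if n1 and n2 both have incoming edges from exactly the labeled
   nodes w1 and w2, both compute pts(n) = pts(w1) U pts(w2). Union is
   order independent and the resulting bitmap is hashable, so
   equiv_class_lookup_or_add assigns both nodes one equivalence class
   without ever interpreting the individual bits. */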
2141 static void
2142 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2143 {
2144 unsigned int i, first_pred;
2145 bitmap_iterator bi;
2146
2147 bitmap_set_bit (si->visited, n);
2148
2149 /* Label and union our incoming edges' points-to sets. */
2150 first_pred = -1U;
2151 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2152 {
2153 unsigned int w = si->node_mapping[i];
2154 if (!bitmap_bit_p (si->visited, w))
2155 label_visit (graph, si, w);
2156
2157 /* Skip unused edges. */
2158 if (w == n || graph->pointer_label[w] == 0)
2159 continue;
2160
2161 if (graph->points_to[w])
2162 {
2163 if (!graph->points_to[n])
2164 {
2165 if (first_pred == -1U)
2166 first_pred = w;
2167 else
2168 {
2169 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2170 bitmap_ior (graph->points_to[n],
2171 graph->points_to[first_pred],
2172 graph->points_to[w]);
2173 }
2174 }
2175 else
2176 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2177 }
2178 }
2179
2180 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2181 if (!bitmap_bit_p (graph->direct_nodes, n))
2182 {
2183 if (!graph->points_to[n])
2184 {
2185 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2186 if (first_pred != -1U)
2187 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2188 }
2189 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2190 graph->pointer_label[n] = pointer_equiv_class++;
2191 equiv_class_label_t ecl;
2192 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2193 graph->points_to[n]);
2194 ecl->equivalence_class = graph->pointer_label[n];
2195 return;
2196 }
2197
2198 /* If there was only a single non-empty predecessor the pointer equiv
2199 class is the same. */
2200 if (!graph->points_to[n])
2201 {
2202 if (first_pred != -1U)
2203 {
2204 graph->pointer_label[n] = graph->pointer_label[first_pred];
2205 graph->points_to[n] = graph->points_to[first_pred];
2206 }
2207 return;
2208 }
2209
2210 if (!bitmap_empty_p (graph->points_to[n]))
2211 {
2212 equiv_class_label_t ecl;
2213 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2214 graph->points_to[n]);
2215 if (ecl->equivalence_class == 0)
2216 ecl->equivalence_class = pointer_equiv_class++;
2217 else
2218 {
2219 BITMAP_FREE (graph->points_to[n]);
2220 graph->points_to[n] = ecl->labels;
2221 }
2222 graph->pointer_label[n] = ecl->equivalence_class;
2223 }
2224 }
2225
2226 /* Print the pred graph in dot format. */
2227
2228 static void
2229 dump_pred_graph (struct scc_info *si, FILE *file)
2230 {
2231 unsigned int i;
2232
2233 /* Only print the graph if it has already been initialized. */
2234 if (!graph)
2235 return;
2236
2237 /* Print the header of the dot file. */
2238 fprintf (file, "strict digraph {\n");
2239 fprintf (file, " node [\n shape = box\n ]\n");
2240 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2241 fprintf (file, "\n // List of nodes and complex constraints in "
2242 "the constraint graph:\n");
2243
2244 /* The next lines print the nodes in the graph together with the
2245 complex constraints attached to them. */
2246 for (i = 1; i < graph->size; i++)
2247 {
2248 if (i == FIRST_REF_NODE)
2249 continue;
2250 if (si->node_mapping[i] != i)
2251 continue;
2252 if (i < FIRST_REF_NODE)
2253 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2254 else
2255 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2256 if (graph->points_to[i]
2257 && !bitmap_empty_p (graph->points_to[i]))
2258 {
2259 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2260 unsigned j;
2261 bitmap_iterator bi;
2262 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2263 fprintf (file, " %d", j);
2264 fprintf (file, " }\"]");
2265 }
2266 fprintf (file, ";\n");
2267 }
2268
2269 /* Go over the edges. */
2270 fprintf (file, "\n // Edges in the constraint graph:\n");
2271 for (i = 1; i < graph->size; i++)
2272 {
2273 unsigned j;
2274 bitmap_iterator bi;
2275 if (si->node_mapping[i] != i)
2276 continue;
2277 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2278 {
2279 unsigned from = si->node_mapping[j];
2280 if (from < FIRST_REF_NODE)
2281 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2282 else
2283 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2284 fprintf (file, " -> ");
2285 if (i < FIRST_REF_NODE)
2286 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2287 else
2288 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2289 fprintf (file, ";\n");
2290 }
2291 }
2292
2293 /* Print the tail of the dot file. */
2294 fprintf (file, "}\n");
2295 }
2296
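/* The output is plain graphviz; a two-node graph would look roughly
   like this (illustrative output, not captured from a real run):

     strict digraph {
       node [ shape = box ]
       edge [ fontsize = "12" ]
       "q"[label="q = { 3 }"];
       "p";
       "q" -> "p";
     }

   which can then be rendered with, e.g., dot -Tpdf. */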
2297 /* Perform offline variable substitution, discovering equivalence
2298 classes, and eliminating non-pointer variables. */
2299
2300 static struct scc_info *
2301 perform_var_substitution (constraint_graph_t graph)
2302 {
2303 unsigned int i;
2304 unsigned int size = graph->size;
2305 struct scc_info *si = init_scc_info (size);
2306
2307 bitmap_obstack_initialize (&iteration_obstack);
2308 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2309 location_equiv_class_table
2310 = new hash_table<equiv_class_hasher> (511);
2311 pointer_equiv_class = 1;
2312 location_equiv_class = 1;
2313
2314 /* Condense the nodes, which means finding SCCs, counting incoming
2315 predecessors, and uniting nodes in SCCs. */
2316 for (i = 1; i < FIRST_REF_NODE; i++)
2317 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2318 condense_visit (graph, si, si->node_mapping[i]);
2319
2320 if (dump_file && (dump_flags & TDF_GRAPH))
2321 {
2322 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2323 "in dot format:\n");
2324 dump_pred_graph (si, dump_file);
2325 fprintf (dump_file, "\n\n");
2326 }
2327
2328 bitmap_clear (si->visited);
2329 /* Actually label the nodes for pointer equivalences. */
2330 for (i = 1; i < FIRST_REF_NODE; i++)
2331 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2332 label_visit (graph, si, si->node_mapping[i]);
2333
2334 /* Calculate location equivalence labels. */
2335 for (i = 1; i < FIRST_REF_NODE; i++)
2336 {
2337 bitmap pointed_by;
2338 bitmap_iterator bi;
2339 unsigned int j;
2340
2341 if (!graph->pointed_by[i])
2342 continue;
2343 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2344
2345 /* Translate the pointed-by mapping for pointer equivalence
2346 labels. */
2347 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2348 {
2349 bitmap_set_bit (pointed_by,
2350 graph->pointer_label[si->node_mapping[j]]);
2351 }
2352 /* The original pointed_by is now dead. */
2353 BITMAP_FREE (graph->pointed_by[i]);
2354
2355 /* Look up the location equivalence label if one exists, or make
2356 one otherwise. */
2357 equiv_class_label_t ecl;
2358 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2359 if (ecl->equivalence_class == 0)
2360 ecl->equivalence_class = location_equiv_class++;
2361 else
2362 {
2363 if (dump_file && (dump_flags & TDF_DETAILS))
2364 fprintf (dump_file, "Found location equivalence for node %s\n",
2365 get_varinfo (i)->name);
2366 BITMAP_FREE (pointed_by);
2367 }
2368 graph->loc_label[i] = ecl->equivalence_class;
2369
2370 }
2371
2372 if (dump_file && (dump_flags & TDF_DETAILS))
2373 for (i = 1; i < FIRST_REF_NODE; i++)
2374 {
2375 unsigned j = si->node_mapping[i];
2376 if (j != i)
2377 {
2378 fprintf (dump_file, "%s node id %d ",
2379 bitmap_bit_p (graph->direct_nodes, i)
2380 ? "Direct" : "Indirect", i);
2381 if (i < FIRST_REF_NODE)
2382 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2383 else
2384 fprintf (dump_file, "\"*%s\"",
2385 get_varinfo (i - FIRST_REF_NODE)->name);
2386 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2387 if (j < FIRST_REF_NODE)
2388 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2389 else
2390 fprintf (dump_file, "\"*%s\"\n",
2391 get_varinfo (j - FIRST_REF_NODE)->name);
2392 }
2393 else
2394 {
2395 fprintf (dump_file,
2396 "Equivalence classes for %s node id %d ",
2397 bitmap_bit_p (graph->direct_nodes, i)
2398 ? "direct" : "indirect", i);
2399 if (i < FIRST_REF_NODE)
2400 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2401 else
2402 fprintf (dump_file, "\"*%s\"",
2403 get_varinfo (i - FIRST_REF_NODE)->name);
2404 fprintf (dump_file,
2405 ": pointer %d, location %d\n",
2406 graph->pointer_label[i], graph->loc_label[i]);
2407 }
2408 }
2409
2410 /* Quickly eliminate our non-pointer variables. */
2411
2412 for (i = 1; i < FIRST_REF_NODE; i++)
2413 {
2414 unsigned int node = si->node_mapping[i];
2415
2416 if (graph->pointer_label[node] == 0)
2417 {
2418 if (dump_file && (dump_flags & TDF_DETAILS))
2419 fprintf (dump_file,
2420 "%s is a non-pointer variable, eliminating edges.\n",
2421 get_varinfo (node)->name);
2422 stats.nonpointer_vars++;
2423 clear_edges_for_node (graph, node);
2424 }
2425 }
2426
2427 return si;
2428 }
2429
2430 /* Free information that was only necessary for variable
2431 substitution. */
2432
2433 static void
2434 free_var_substitution_info (struct scc_info *si)
2435 {
2436 free_scc_info (si);
2437 free (graph->pointer_label);
2438 free (graph->loc_label);
2439 free (graph->pointed_by);
2440 free (graph->points_to);
2441 free (graph->eq_rep);
2442 sbitmap_free (graph->direct_nodes);
2443 delete pointer_equiv_class_table;
2444 pointer_equiv_class_table = NULL;
2445 delete location_equiv_class_table;
2446 location_equiv_class_table = NULL;
2447 bitmap_obstack_release (&iteration_obstack);
2448 }
2449
2450 /* Return an existing node that is equivalent to NODE, which has
2451 equivalence class LABEL, if one exists. Return NODE otherwise. */
2452
2453 static unsigned int
2454 find_equivalent_node (constraint_graph_t graph,
2455 unsigned int node, unsigned int label)
2456 {
2457 /* If the address version of this variable is unused, we can
2458 substitute it for anything else with the same label.
2459 Otherwise, we know the pointers are equivalent, but not the
2460 locations, and we can unite them later. */
2461
2462 if (!bitmap_bit_p (graph->address_taken, node))
2463 {
2464 gcc_checking_assert (label < graph->size);
2465
2466 if (graph->eq_rep[label] != -1)
2467 {
2468 /* Unify the two variables since we know they are equivalent. */
2469 if (unite (graph->eq_rep[label], node))
2470 unify_nodes (graph, graph->eq_rep[label], node, false);
2471 return graph->eq_rep[label];
2472 }
2473 else
2474 {
2475 graph->eq_rep[label] = node;
2476 graph->pe_rep[label] = node;
2477 }
2478 }
2479 else
2480 {
2481 gcc_checking_assert (label < graph->size);
2482 graph->pe[node] = label;
2483 if (graph->pe_rep[label] == -1)
2484 graph->pe_rep[label] = node;
2485 }
2486
2487 return node;
2488 }
2489
2490 /* Unite pointer equivalent but not location equivalent nodes in
2491 GRAPH. This may only be performed once variable substitution is
2492 finished. */
2493
2494 static void
2495 unite_pointer_equivalences (constraint_graph_t graph)
2496 {
2497 unsigned int i;
2498
2499 /* Go through the pointer equivalences and unite them to their
2500 representative, if they aren't already. */
2501 for (i = 1; i < FIRST_REF_NODE; i++)
2502 {
2503 unsigned int label = graph->pe[i];
2504 if (label)
2505 {
2506 int label_rep = graph->pe_rep[label];
2507
2508 if (label_rep == -1)
2509 continue;
2510
2511 label_rep = find (label_rep);
2512 if (label_rep >= 0 && unite (label_rep, find (i)))
2513 unify_nodes (graph, label_rep, i, false);
2514 }
2515 }
2516 }
2517
2518 /* Move complex constraints to the GRAPH nodes they belong to. */
2519
2520 static void
2521 move_complex_constraints (constraint_graph_t graph)
2522 {
2523 int i;
2524 constraint_t c;
2525
2526 FOR_EACH_VEC_ELT (constraints, i, c)
2527 {
2528 if (c)
2529 {
2530 struct constraint_expr lhs = c->lhs;
2531 struct constraint_expr rhs = c->rhs;
2532
2533 if (lhs.type == DEREF)
2534 {
2535 insert_into_complex (graph, lhs.var, c);
2536 }
2537 else if (rhs.type == DEREF)
2538 {
2539 if (!(get_varinfo (lhs.var)->is_special_var))
2540 insert_into_complex (graph, rhs.var, c);
2541 }
2542 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2543 && (lhs.offset != 0 || rhs.offset != 0))
2544 {
2545 insert_into_complex (graph, rhs.var, c);
2546 }
2547 }
2548 }
2549 }
2550
2551
2552 /* Optimize and rewrite complex constraints while performing
2553 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2554 result of perform_variable_substitution. */
2555
2556 static void
2557 rewrite_constraints (constraint_graph_t graph,
2558 struct scc_info *si)
2559 {
2560 int i;
2561 constraint_t c;
2562
2563 #ifdef ENABLE_CHECKING
2564 for (unsigned int j = 0; j < graph->size; j++)
2565 gcc_assert (find (j) == j);
2566 #endif
2567
2568 FOR_EACH_VEC_ELT (constraints, i, c)
2569 {
2570 struct constraint_expr lhs = c->lhs;
2571 struct constraint_expr rhs = c->rhs;
2572 unsigned int lhsvar = find (lhs.var);
2573 unsigned int rhsvar = find (rhs.var);
2574 unsigned int lhsnode, rhsnode;
2575 unsigned int lhslabel, rhslabel;
2576
2577 lhsnode = si->node_mapping[lhsvar];
2578 rhsnode = si->node_mapping[rhsvar];
2579 lhslabel = graph->pointer_label[lhsnode];
2580 rhslabel = graph->pointer_label[rhsnode];
2581
2582 /* See if it is really a non-pointer variable, and if so, ignore
2583 the constraint. */
2584 if (lhslabel == 0)
2585 {
2586 if (dump_file && (dump_flags & TDF_DETAILS))
2587 {
2588
2589 fprintf (dump_file, "%s is a non-pointer variable,"
2590 "ignoring constraint:",
2591 get_varinfo (lhs.var)->name);
2592 dump_constraint (dump_file, c);
2593 fprintf (dump_file, "\n");
2594 }
2595 constraints[i] = NULL;
2596 continue;
2597 }
2598
2599 if (rhslabel == 0)
2600 {
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2602 {
2603
2604 fprintf (dump_file, "%s is a non-pointer variable,"
2605 "ignoring constraint:",
2606 get_varinfo (rhs.var)->name);
2607 dump_constraint (dump_file, c);
2608 fprintf (dump_file, "\n");
2609 }
2610 constraints[i] = NULL;
2611 continue;
2612 }
2613
2614 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2615 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2616 c->lhs.var = lhsvar;
2617 c->rhs.var = rhsvar;
2618 }
2619 }
2620
2621 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2622 part of an SCC, false otherwise. */
2623
2624 static bool
2625 eliminate_indirect_cycles (unsigned int node)
2626 {
2627 if (graph->indirect_cycles[node] != -1
2628 && !bitmap_empty_p (get_varinfo (node)->solution))
2629 {
2630 unsigned int i;
2631 auto_vec<unsigned> queue;
2632 int queuepos;
2633 unsigned int to = find (graph->indirect_cycles[node]);
2634 bitmap_iterator bi;
2635
2636 /* We can't touch the solution set and call unify_nodes
2637 at the same time, because unify_nodes is going to do
2638 bitmap unions into it. */
2639
2640 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2641 {
2642 if (find (i) == i && i != to)
2643 {
2644 if (unite (to, i))
2645 queue.safe_push (i);
2646 }
2647 }
2648
2649 for (queuepos = 0;
2650 queue.iterate (queuepos, &i);
2651 queuepos++)
2652 {
2653 unify_nodes (graph, to, i, true);
2654 }
2655 return true;
2656 }
2657 return false;
2658 }
2659
2660 /* Solve the constraint graph GRAPH using our worklist solver.
2661 This is based on the PW* family of solvers from the "Efficient Field
2662 Sensitive Pointer Analysis for C" paper.
2663 It works by iterating over all the graph nodes, processing the complex
2664 constraints and propagating the copy constraints, until everything stops
2665 changing. This corresponds to steps 6-8 in the solving list given above. */
2666
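/* A minimal propagation example (a sketch, not from the original
   sources): with the constraints

     a = &x;   b = a;   c = *b;

   the initial solution pts(a) = {x} marks a as changed. Propagating
   along the copy edge a -> b grows pts(b) to {x}; the complex
   constraint c = *b attached to b then installs an edge x -> c, and
   a later iteration copies pts(x) into pts(c). The loop below ends
   once no solution grows any further. */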
2667 static void
2668 solve_graph (constraint_graph_t graph)
2669 {
2670 unsigned int size = graph->size;
2671 unsigned int i;
2672 bitmap pts;
2673
2674 changed = BITMAP_ALLOC (NULL);
2675
2676 /* Mark all initial non-collapsed nodes as changed. */
2677 for (i = 1; i < size; i++)
2678 {
2679 varinfo_t ivi = get_varinfo (i);
2680 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2681 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2682 || graph->complex[i].length () > 0))
2683 bitmap_set_bit (changed, i);
2684 }
2685
2686 /* Allocate a bitmap to be used to store the changed bits. */
2687 pts = BITMAP_ALLOC (&pta_obstack);
2688
2689 while (!bitmap_empty_p (changed))
2690 {
2691 unsigned int i;
2692 struct topo_info *ti = init_topo_info ();
2693 stats.iterations++;
2694
2695 bitmap_obstack_initialize (&iteration_obstack);
2696
2697 compute_topo_order (graph, ti);
2698
2699 while (ti->topo_order.length () != 0)
2700 {
2701
2702 i = ti->topo_order.pop ();
2703
2704 /* If this variable is not a representative, skip it. */
2705 if (find (i) != i)
2706 continue;
2707
2708 /* In certain indirect cycle cases, we may merge this
2709 variable to another. */
2710 if (eliminate_indirect_cycles (i) && find (i) != i)
2711 continue;
2712
2713 /* If the node has changed, we need to process the
2714 complex constraints and outgoing edges again. */
2715 if (bitmap_clear_bit (changed, i))
2716 {
2717 unsigned int j;
2718 constraint_t c;
2719 bitmap solution;
2720 vec<constraint_t> complex = graph->complex[i];
2721 varinfo_t vi = get_varinfo (i);
2722 bool solution_empty;
2723
2724 /* Compute the changed set of solution bits. If anything
2725 is in the solution just propagate that. */
2726 if (bitmap_bit_p (vi->solution, anything_id))
2727 {
2728 /* If anything is also in the old solution there is
2729 nothing to do.
2730 ??? But we shouldn't have ended up with "changed" set ... */
2731 if (vi->oldsolution
2732 && bitmap_bit_p (vi->oldsolution, anything_id))
2733 continue;
2734 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2735 }
2736 else if (vi->oldsolution)
2737 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2738 else
2739 bitmap_copy (pts, vi->solution);
2740
2741 if (bitmap_empty_p (pts))
2742 continue;
2743
2744 if (vi->oldsolution)
2745 bitmap_ior_into (vi->oldsolution, pts);
2746 else
2747 {
2748 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2749 bitmap_copy (vi->oldsolution, pts);
2750 }
2751
2752 solution = vi->solution;
2753 solution_empty = bitmap_empty_p (solution);
2754
2755 /* Process the complex constraints */
2756 bitmap expanded_pts = NULL;
2757 FOR_EACH_VEC_ELT (complex, j, c)
2758 {
2759 /* XXX: This is going to unsort the constraints in
2760 some cases, which will occasionally add duplicate
2761 constraints during unification. This does not
2762 affect correctness. */
2763 c->lhs.var = find (c->lhs.var);
2764 c->rhs.var = find (c->rhs.var);
2765
2766 /* The only complex constraint that can change our
2767 solution to non-empty, given an empty solution,
2768 is a constraint where the lhs side is receiving
2769 some set from elsewhere. */
2770 if (!solution_empty || c->lhs.type != DEREF)
2771 do_complex_constraint (graph, c, pts, &expanded_pts);
2772 }
2773 BITMAP_FREE (expanded_pts);
2774
2775 solution_empty = bitmap_empty_p (solution);
2776
2777 if (!solution_empty)
2778 {
2779 bitmap_iterator bi;
2780 unsigned eff_escaped_id = find (escaped_id);
2781
2782 /* Propagate solution to all successors. */
2783 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2784 0, j, bi)
2785 {
2786 bitmap tmp;
2787 bool flag;
2788
2789 unsigned int to = find (j);
2790 tmp = get_varinfo (to)->solution;
2791 flag = false;
2792
2793 /* Don't try to propagate to ourselves. */
2794 if (to == i)
2795 continue;
2796
2797 /* If we propagate from ESCAPED use ESCAPED as
2798 placeholder. */
2799 if (i == eff_escaped_id)
2800 flag = bitmap_set_bit (tmp, escaped_id);
2801 else
2802 flag = bitmap_ior_into (tmp, pts);
2803
2804 if (flag)
2805 bitmap_set_bit (changed, to);
2806 }
2807 }
2808 }
2809 }
2810 free_topo_info (ti);
2811 bitmap_obstack_release (&iteration_obstack);
2812 }
2813
2814 BITMAP_FREE (pts);
2815 BITMAP_FREE (changed);
2816 bitmap_obstack_release (&oldpta_obstack);
2817 }
2818
2819 /* Map from trees to variable infos. */
2820 static hash_map<tree, varinfo_t> *vi_for_tree;
2821
2822
2823 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2824
2825 static void
2826 insert_vi_for_tree (tree t, varinfo_t vi)
2827 {
2828 gcc_assert (vi);
2829 gcc_assert (!vi_for_tree->put (t, vi));
2830 }
2831
2832 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2833 exist in the map, return NULL, otherwise, return the varinfo we found. */
2834
2835 static varinfo_t
2836 lookup_vi_for_tree (tree t)
2837 {
2838 varinfo_t *slot = vi_for_tree->get (t);
2839 if (slot == NULL)
2840 return NULL;
2841
2842 return *slot;
2843 }
2844
2845 /* Return a printable name for DECL. */
2846
2847 static const char *
2848 alias_get_name (tree decl)
2849 {
2850 const char *res = NULL;
2851 char *temp;
2852 int num_printed = 0;
2853
2854 if (!dump_file)
2855 return "NULL";
2856
2857 if (TREE_CODE (decl) == SSA_NAME)
2858 {
2859 res = get_name (decl);
2860 if (res)
2861 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2862 else
2863 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2864 if (num_printed > 0)
2865 {
2866 res = ggc_strdup (temp);
2867 free (temp);
2868 }
2869 }
2870 else if (DECL_P (decl))
2871 {
2872 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2873 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2874 else
2875 {
2876 res = get_name (decl);
2877 if (!res)
2878 {
2879 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2880 if (num_printed > 0)
2881 {
2882 res = ggc_strdup (temp);
2883 free (temp);
2884 }
2885 }
2886 }
2887 }
2888 if (res != NULL)
2889 return res;
2890
2891 return "NULL";
2892 }
2893
2894 /* Find the variable id for tree T in the map.
2895 If T doesn't exist in the map, create an entry for it and return it. */
2896
2897 static varinfo_t
2898 get_vi_for_tree (tree t)
2899 {
2900 varinfo_t *slot = vi_for_tree->get (t);
2901 if (slot == NULL)
2902 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2903
2904 return *slot;
2905 }
2906
2907 /* Get a scalar constraint expression for a new temporary variable. */
2908
2909 static struct constraint_expr
2910 new_scalar_tmp_constraint_exp (const char *name)
2911 {
2912 struct constraint_expr tmp;
2913 varinfo_t vi;
2914
2915 vi = new_var_info (NULL_TREE, name);
2916 vi->offset = 0;
2917 vi->size = -1;
2918 vi->fullsize = -1;
2919 vi->is_full_var = 1;
2920
2921 tmp.var = vi->id;
2922 tmp.type = SCALAR;
2923 tmp.offset = 0;
2924
2925 return tmp;
2926 }
2927
2928 /* Get a constraint expression vector from an SSA_VAR_P node.
2929 If address_p is true, the result will have its address taken. */
2930
2931 static void
2932 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2933 {
2934 struct constraint_expr cexpr;
2935 varinfo_t vi;
2936
2937 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2938 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2939
2940 /* For parameters, get at the points-to set for the actual parm
2941 decl. */
2942 if (TREE_CODE (t) == SSA_NAME
2943 && SSA_NAME_IS_DEFAULT_DEF (t)
2944 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2945 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2946 {
2947 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2948 return;
2949 }
2950
2951 /* For global variables resort to the alias target. */
2952 if (TREE_CODE (t) == VAR_DECL
2953 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2954 {
2955 varpool_node *node = varpool_node::get (t);
2956 if (node && node->alias && node->analyzed)
2957 {
2958 node = node->ultimate_alias_target ();
2959 t = node->decl;
2960 }
2961 }
2962
2963 vi = get_vi_for_tree (t);
2964 cexpr.var = vi->id;
2965 cexpr.type = SCALAR;
2966 cexpr.offset = 0;
2967
2968 /* If we are not taking the address of the constraint expr, add all
2969 sub-fields of the variable as well. */
2970 if (!address_p
2971 && !vi->is_full_var)
2972 {
2973 for (; vi; vi = vi_next (vi))
2974 {
2975 cexpr.var = vi->id;
2976 results->safe_push (cexpr);
2977 }
2978 return;
2979 }
2980
2981 results->safe_push (cexpr);
2982 }
2983
2984 /* Process constraint T, performing various simplifications and then
2985 adding it to our list of overall constraints. */
2986
2987 static void
2988 process_constraint (constraint_t t)
2989 {
2990 struct constraint_expr rhs = t->rhs;
2991 struct constraint_expr lhs = t->lhs;
2992
2993 gcc_assert (rhs.var < varmap.length ());
2994 gcc_assert (lhs.var < varmap.length ());
2995
2996 /* If we didn't get any useful constraint from the lhs we get
2997 &ANYTHING as fallback from get_constraint_for. Deal with
2998 it here by turning it into *ANYTHING. */
2999 if (lhs.type == ADDRESSOF
3000 && lhs.var == anything_id)
3001 lhs.type = DEREF;
3002
3003 /* ADDRESSOF on the lhs is invalid. */
3004 gcc_assert (lhs.type != ADDRESSOF);
3005
3006 /* We shouldn't add constraints from things that cannot have pointers.
3007 It's not completely trivial to avoid in the callers, so do it here. */
3008 if (rhs.type != ADDRESSOF
3009 && !get_varinfo (rhs.var)->may_have_pointers)
3010 return;
3011
3012 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3013 if (!get_varinfo (lhs.var)->may_have_pointers)
3014 return;
3015
3016 /* This can happen in our IR with things like n->a = *p. */
3017 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3018 {
3019 /* Split into tmp = *rhs, *lhs = tmp */
3020 struct constraint_expr tmplhs;
3021 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
3022 process_constraint (new_constraint (tmplhs, rhs));
3023 process_constraint (new_constraint (lhs, tmplhs));
3024 }
3025 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
3026 {
3027 /* Split into tmp = &rhs, *lhs = tmp */
3028 struct constraint_expr tmplhs;
3029 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
3030 process_constraint (new_constraint (tmplhs, rhs));
3031 process_constraint (new_constraint (lhs, tmplhs));
3032 }
3033 else
3034 {
3035 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3036 constraints.safe_push (t);
3037 }
3038 }
3039
3040
3041 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3042 structure. */
3043
3044 static HOST_WIDE_INT
3045 bitpos_of_field (const tree fdecl)
3046 {
3047 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3048 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3049 return -1;
3050
3051 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3052 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3053 }
3054
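/* For example (assuming a 32-bit int and no padding), given

     struct S { int a; int b; };

   the position of b evaluates to 32 bits: the byte-granular part
   (DECL_FIELD_OFFSET) scaled by BITS_PER_UNIT plus the bit-granular
   part (DECL_FIELD_BIT_OFFSET). -1 is returned when either part is
   not a compile-time constant, e.g. for variably placed fields. */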
3055
3056 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3057 resulting constraint expressions in *RESULTS. */
3058
3059 static void
3060 get_constraint_for_ptr_offset (tree ptr, tree offset,
3061 vec<ce_s> *results)
3062 {
3063 struct constraint_expr c;
3064 unsigned int j, n;
3065 HOST_WIDE_INT rhsoffset;
3066
3067 /* If we do not do field-sensitive PTA adding offsets to pointers
3068 does not change the points-to solution. */
3069 if (!use_field_sensitive)
3070 {
3071 get_constraint_for_rhs (ptr, results);
3072 return;
3073 }
3074
3075 /* If the offset is not an integer constant that fits in a
3076 HOST_WIDE_INT, we have to fall back to a conservative
3077 solution which includes all sub-fields of all pointed-to
3078 variables of ptr. */
3079 if (offset == NULL_TREE
3080 || TREE_CODE (offset) != INTEGER_CST)
3081 rhsoffset = UNKNOWN_OFFSET;
3082 else
3083 {
3084 /* Sign-extend the offset. */
3085 offset_int soffset = offset_int::from (offset, SIGNED);
3086 if (!wi::fits_shwi_p (soffset))
3087 rhsoffset = UNKNOWN_OFFSET;
3088 else
3089 {
3090 /* Make sure the bit-offset also fits. */
3091 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3092 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3093 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3094 rhsoffset = UNKNOWN_OFFSET;
3095 }
3096 }
3097
3098 get_constraint_for_rhs (ptr, results);
3099 if (rhsoffset == 0)
3100 return;
3101
3102 /* As we are eventually appending to the solution do not use
3103 vec::iterate here. */
3104 n = results->length ();
3105 for (j = 0; j < n; j++)
3106 {
3107 varinfo_t curr;
3108 c = (*results)[j];
3109 curr = get_varinfo (c.var);
3110
3111 if (c.type == ADDRESSOF
3112 /* If this varinfo represents a full variable just use it. */
3113 && curr->is_full_var)
3114 ;
3115 else if (c.type == ADDRESSOF
3116 /* If we do not know the offset add all subfields. */
3117 && rhsoffset == UNKNOWN_OFFSET)
3118 {
3119 varinfo_t temp = get_varinfo (curr->head);
3120 do
3121 {
3122 struct constraint_expr c2;
3123 c2.var = temp->id;
3124 c2.type = ADDRESSOF;
3125 c2.offset = 0;
3126 if (c2.var != c.var)
3127 results->safe_push (c2);
3128 temp = vi_next (temp);
3129 }
3130 while (temp);
3131 }
3132 else if (c.type == ADDRESSOF)
3133 {
3134 varinfo_t temp;
3135 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3136
3137 /* If curr->offset + rhsoffset is less than zero adjust it. */
3138 if (rhsoffset < 0
3139 && curr->offset < offset)
3140 offset = 0;
3141
3142 /* We have to include all fields that overlap the current
3143 field shifted by rhsoffset. And we include at least
3144 the last or the first field of the variable to conservatively
3145 represent reachability of off-bound addresses, in particular
3146 &object + 1. */
3147 temp = first_or_preceding_vi_for_offset (curr, offset);
3148 c.var = temp->id;
3149 c.offset = 0;
3150 temp = vi_next (temp);
3151 while (temp
3152 && temp->offset < offset + curr->size)
3153 {
3154 struct constraint_expr c2;
3155 c2.var = temp->id;
3156 c2.type = ADDRESSOF;
3157 c2.offset = 0;
3158 results->safe_push (c2);
3159 temp = vi_next (temp);
3160 }
3161 }
3162 else if (c.type == SCALAR)
3163 {
3164 gcc_assert (c.offset == 0);
3165 c.offset = rhsoffset;
3166 }
3167 else
3168 /* We shouldn't get any DEREFs here. */
3169 gcc_unreachable ();
3170
3171 (*results)[j] = c;
3172 }
3173 }
3174
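/* For instance (a sketch, not from the original sources), with
   field-sensitive PTA and

     struct S { int a; int b; } s;
     int *p = &s.a + 1;

   the rhs offset is 32 bits, so the ADDRESSOF case above re-anchors
   the expression at the field s.b plus any overlapping fields. A
   non-constant offset instead becomes UNKNOWN_OFFSET and pulls in
   every sub-field of s, the conservative fallback described above. */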
3175
3176 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3177 If address_p is true the result will have its address taken.
3178 If lhs_p is true then the constraint expression is assumed to be used
3179 as the lhs. */
3180
3181 static void
3182 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3183 bool address_p, bool lhs_p)
3184 {
3185 tree orig_t = t;
3186 HOST_WIDE_INT bitsize = -1;
3187 HOST_WIDE_INT bitmaxsize = -1;
3188 HOST_WIDE_INT bitpos;
3189 tree forzero;
3190
3191 /* Some people like to do cute things like take the address of
3192 &0->a.b */
3193 forzero = t;
3194 while (handled_component_p (forzero)
3195 || INDIRECT_REF_P (forzero)
3196 || TREE_CODE (forzero) == MEM_REF)
3197 forzero = TREE_OPERAND (forzero, 0);
3198
3199 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3200 {
3201 struct constraint_expr temp;
3202
3203 temp.offset = 0;
3204 temp.var = integer_id;
3205 temp.type = SCALAR;
3206 results->safe_push (temp);
3207 return;
3208 }
3209
3210 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3211
3212 /* Pretend to take the address of the base; we'll take care of
3213 adding the required subset of sub-fields below. */
3214 get_constraint_for_1 (t, results, true, lhs_p);
3215 gcc_assert (results->length () == 1);
3216 struct constraint_expr &result = results->last ();
3217
3218 if (result.type == SCALAR
3219 && get_varinfo (result.var)->is_full_var)
3220 /* For single-field vars do not bother about the offset. */
3221 result.offset = 0;
3222 else if (result.type == SCALAR)
3223 {
3224 /* In languages like C, you can access one past the end of an
3225 array. You aren't allowed to dereference it, so we can
3226 ignore this constraint. When we handle pointer subtraction,
3227 we may have to do something cute here. */
3228
3229 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3230 && bitmaxsize != 0)
3231 {
3232 /* It's also not true that the constraint will actually start at the
3233 right offset; it may start in some padding. We only care about
3234 setting the constraint to the first actual field it touches, so
3235 walk to find it. */
3236 struct constraint_expr cexpr = result;
3237 varinfo_t curr;
3238 results->pop ();
3239 cexpr.offset = 0;
3240 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3241 {
3242 if (ranges_overlap_p (curr->offset, curr->size,
3243 bitpos, bitmaxsize))
3244 {
3245 cexpr.var = curr->id;
3246 results->safe_push (cexpr);
3247 if (address_p)
3248 break;
3249 }
3250 }
3251 /* If we are going to take the address of this field then
3252 to be able to compute reachability correctly add at least
3253 the last field of the variable. */
3254 if (address_p && results->length () == 0)
3255 {
3256 curr = get_varinfo (cexpr.var);
3257 while (curr->next != 0)
3258 curr = vi_next (curr);
3259 cexpr.var = curr->id;
3260 results->safe_push (cexpr);
3261 }
3262 else if (results->length () == 0)
3263 /* Assert that we found *some* field there. The user couldn't be
3264 accessing *only* padding. */
3265 /* Still the user could access one past the end of an array
3266 embedded in a struct resulting in accessing *only* padding. */
3267 /* Or accessing only padding via type-punning to a type
3268 that has a field just in padding space. */
3269 {
3270 cexpr.type = SCALAR;
3271 cexpr.var = anything_id;
3272 cexpr.offset = 0;
3273 results->safe_push (cexpr);
3274 }
3275 }
3276 else if (bitmaxsize == 0)
3277 {
3278 if (dump_file && (dump_flags & TDF_DETAILS))
3279 fprintf (dump_file, "Access to zero-sized part of variable,"
3280 "ignoring\n");
3281 }
3282 else
3283 if (dump_file && (dump_flags & TDF_DETAILS))
3284 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3285 }
3286 else if (result.type == DEREF)
3287 {
3288 /* If we do not know exactly where the access goes say so. Note
3289 that only for non-structure accesses do we know that we access
3290 at most one subfield of any variable. */
3291 if (bitpos == -1
3292 || bitsize != bitmaxsize
3293 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3294 || result.offset == UNKNOWN_OFFSET)
3295 result.offset = UNKNOWN_OFFSET;
3296 else
3297 result.offset += bitpos;
3298 }
3299 else if (result.type == ADDRESSOF)
3300 {
3301 /* We can end up here for component references on a
3302 VIEW_CONVERT_EXPR <>(&foobar). */
3303 result.type = SCALAR;
3304 result.var = anything_id;
3305 result.offset = 0;
3306 }
3307 else
3308 gcc_unreachable ();
3309 }
3310
3311
3312 /* Dereference the constraint expression CONS, and return the result.
3313 DEREF (ADDRESSOF) = SCALAR
3314 DEREF (SCALAR) = DEREF
3315 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3316 This is needed so that we can handle dereferencing DEREF constraints. */
3317
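/* E.g. for a double dereference x = **p (an illustrative fragment,
   not from the original sources), the operand *p is itself a DEREF
   constraint, so dereferencing it again materializes

     dereftmp = *p;   x = *dereftmp;

   which is the DEREF (DEREF) rule above. */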
3318 static void
3319 do_deref (vec<ce_s> *constraints)
3320 {
3321 struct constraint_expr *c;
3322 unsigned int i = 0;
3323
3324 FOR_EACH_VEC_ELT (*constraints, i, c)
3325 {
3326 if (c->type == SCALAR)
3327 c->type = DEREF;
3328 else if (c->type == ADDRESSOF)
3329 c->type = SCALAR;
3330 else if (c->type == DEREF)
3331 {
3332 struct constraint_expr tmplhs;
3333 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3334 process_constraint (new_constraint (tmplhs, *c));
3335 c->var = tmplhs.var;
3336 }
3337 else
3338 gcc_unreachable ();
3339 }
3340 }
3341
3342 /* Given a tree T, return the constraint expression for taking the
3343 address of it. */
3344
3345 static void
3346 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3347 {
3348 struct constraint_expr *c;
3349 unsigned int i;
3350
3351 get_constraint_for_1 (t, results, true, true);
3352
3353 FOR_EACH_VEC_ELT (*results, i, c)
3354 {
3355 if (c->type == DEREF)
3356 c->type = SCALAR;
3357 else
3358 c->type = ADDRESSOF;
3359 }
3360 }
3361
3362 /* Given a tree T, return the constraint expression for it. */
3363
3364 static void
3365 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3366 bool lhs_p)
3367 {
3368 struct constraint_expr temp;
3369
3370 /* x = integer is all glommed to a single variable, which doesn't
3371 point to anything by itself. That is, of course, unless it is an
3372 integer constant being treated as a pointer, in which case, we
3373 will return that this is really the addressof anything. This
3374 happens below, since it will fall into the default case. The only
3375 case we know something about an integer treated like a pointer is
3376 when it is the NULL pointer, and then we just say it points to
3377 NULL.
3378
3379 Do not do that if -fno-delete-null-pointer-checks though, because
3380 in that case *NULL does not fail, so it _should_ alias *anything.
3381 It is not worth adding a new option or renaming the existing one,
3382 since this case is relatively obscure. */
3383 if ((TREE_CODE (t) == INTEGER_CST
3384 && integer_zerop (t))
3385 /* The only valid CONSTRUCTORs in gimple with pointer typed
3386 elements are zero-initializers. But in IPA mode we also
3387 process global initializers, so verify at least. */
3388 || (TREE_CODE (t) == CONSTRUCTOR
3389 && CONSTRUCTOR_NELTS (t) == 0))
3390 {
3391 if (flag_delete_null_pointer_checks)
3392 temp.var = nothing_id;
3393 else
3394 temp.var = nonlocal_id;
3395 temp.type = ADDRESSOF;
3396 temp.offset = 0;
3397 results->safe_push (temp);
3398 return;
3399 }
3400
3401 /* String constants are read-only, ideally we'd have a CONST_DECL
3402 for those. */
3403 if (TREE_CODE (t) == STRING_CST)
3404 {
3405 temp.var = string_id;
3406 temp.type = SCALAR;
3407 temp.offset = 0;
3408 results->safe_push (temp);
3409 return;
3410 }
3411
3412 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3413 {
3414 case tcc_expression:
3415 {
3416 switch (TREE_CODE (t))
3417 {
3418 case ADDR_EXPR:
3419 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3420 return;
3421 default:;
3422 }
3423 break;
3424 }
3425 case tcc_reference:
3426 {
3427 switch (TREE_CODE (t))
3428 {
3429 case MEM_REF:
3430 {
3431 struct constraint_expr cs;
3432 varinfo_t vi, curr;
3433 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3434 TREE_OPERAND (t, 1), results);
3435 do_deref (results);
3436
3437 /* If we are not taking the address then make sure to process
3438 all subvariables we might access. */
3439 if (address_p)
3440 return;
3441
3442 cs = results->last ();
3443 if (cs.type == DEREF
3444 && type_can_have_subvars (TREE_TYPE (t)))
3445 {
3446 /* For dereferences this means we have to defer it
3447 to solving time. */
3448 results->last ().offset = UNKNOWN_OFFSET;
3449 return;
3450 }
3451 if (cs.type != SCALAR)
3452 return;
3453
3454 vi = get_varinfo (cs.var);
3455 curr = vi_next (vi);
3456 if (!vi->is_full_var
3457 && curr)
3458 {
3459 unsigned HOST_WIDE_INT size;
3460 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3461 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3462 else
3463 size = -1;
3464 for (; curr; curr = vi_next (curr))
3465 {
3466 if (curr->offset - vi->offset < size)
3467 {
3468 cs.var = curr->id;
3469 results->safe_push (cs);
3470 }
3471 else
3472 break;
3473 }
3474 }
3475 return;
3476 }
3477 case ARRAY_REF:
3478 case ARRAY_RANGE_REF:
3479 case COMPONENT_REF:
3480 case IMAGPART_EXPR:
3481 case REALPART_EXPR:
3482 case BIT_FIELD_REF:
3483 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3484 return;
3485 case VIEW_CONVERT_EXPR:
3486 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3487 lhs_p);
3488 return;
3489 /* We are missing handling for TARGET_MEM_REF here. */
3490 default:;
3491 }
3492 break;
3493 }
3494 case tcc_exceptional:
3495 {
3496 switch (TREE_CODE (t))
3497 {
3498 case SSA_NAME:
3499 {
3500 get_constraint_for_ssa_var (t, results, address_p);
3501 return;
3502 }
3503 case CONSTRUCTOR:
3504 {
3505 unsigned int i;
3506 tree val;
3507 auto_vec<ce_s> tmp;
3508 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3509 {
3510 struct constraint_expr *rhsp;
3511 unsigned j;
3512 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3513 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3514 results->safe_push (*rhsp);
3515 tmp.truncate (0);
3516 }
3517 /* We do not know whether the constructor was complete,
3518 so technically we have to add &NOTHING or &ANYTHING
3519 like we do for an empty constructor as well. */
3520 return;
3521 }
3522 default:;
3523 }
3524 break;
3525 }
3526 case tcc_declaration:
3527 {
3528 get_constraint_for_ssa_var (t, results, address_p);
3529 return;
3530 }
3531 case tcc_constant:
3532 {
3533 /* We cannot refer to automatic variables through constants. */
3534 temp.type = ADDRESSOF;
3535 temp.var = nonlocal_id;
3536 temp.offset = 0;
3537 results->safe_push (temp);
3538 return;
3539 }
3540 default:;
3541 }
3542
3543 /* The default fallback is a constraint from anything. */
3544 temp.type = ADDRESSOF;
3545 temp.var = anything_id;
3546 temp.offset = 0;
3547 results->safe_push (temp);
3548 }
3549
3550 /* Given a gimple tree T, return the constraint expression vector for it. */
3551
3552 static void
3553 get_constraint_for (tree t, vec<ce_s> *results)
3554 {
3555 gcc_assert (results->length () == 0);
3556
3557 get_constraint_for_1 (t, results, false, true);
3558 }
3559
3560 /* Given a gimple tree T, return the constraint expression vector for it
3561 to be used as the rhs of a constraint. */
3562
3563 static void
3564 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3565 {
3566 gcc_assert (results->length () == 0);
3567
3568 get_constraint_for_1 (t, results, false, false);
3569 }
3570
3571
3572 /* Efficiently generates constraints from all entries in *RHSC to all
3573 entries in *LHSC. */
3574
3575 static void
3576 process_all_all_constraints (vec<ce_s> lhsc,
3577 vec<ce_s> rhsc)
3578 {
3579 struct constraint_expr *lhsp, *rhsp;
3580 unsigned i, j;
3581
3582 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3583 {
3584 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3585 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3586 process_constraint (new_constraint (*lhsp, *rhsp));
3587 }
3588 else
3589 {
3590 struct constraint_expr tmp;
3591 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3592 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3593 process_constraint (new_constraint (tmp, *rhsp));
3594 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3595 process_constraint (new_constraint (*lhsp, tmp));
3596 }
3597 }
3598
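/* E.g. for lhs = {l1, l2, l3} and rhs = {r1, r2, r3} (hypothetical
   expression vectors), the temporary yields

     allalltmp = r1;   allalltmp = r2;   allalltmp = r3;
     l1 = allalltmp;   l2 = allalltmp;   l3 = allalltmp;

   i.e. 6 constraints instead of 9 pairwise copies; every lhs still
   ends up with the union of all rhs solutions. */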
3599 /* Handle aggregate copies by expanding into copies of the respective
3600 fields of the structures. */
3601
3602 static void
3603 do_structure_copy (tree lhsop, tree rhsop)
3604 {
3605 struct constraint_expr *lhsp, *rhsp;
3606 auto_vec<ce_s> lhsc;
3607 auto_vec<ce_s> rhsc;
3608 unsigned j;
3609
3610 get_constraint_for (lhsop, &lhsc);
3611 get_constraint_for_rhs (rhsop, &rhsc);
3612 lhsp = &lhsc[0];
3613 rhsp = &rhsc[0];
3614 if (lhsp->type == DEREF
3615 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3616 || rhsp->type == DEREF)
3617 {
3618 if (lhsp->type == DEREF)
3619 {
3620 gcc_assert (lhsc.length () == 1);
3621 lhsp->offset = UNKNOWN_OFFSET;
3622 }
3623 if (rhsp->type == DEREF)
3624 {
3625 gcc_assert (rhsc.length () == 1);
3626 rhsp->offset = UNKNOWN_OFFSET;
3627 }
3628 process_all_all_constraints (lhsc, rhsc);
3629 }
3630 else if (lhsp->type == SCALAR
3631 && (rhsp->type == SCALAR
3632 || rhsp->type == ADDRESSOF))
3633 {
3634 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3635 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3636 unsigned k = 0;
3637 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3638 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3639 for (j = 0; lhsc.iterate (j, &lhsp);)
3640 {
3641 varinfo_t lhsv, rhsv;
3642 rhsp = &rhsc[k];
3643 lhsv = get_varinfo (lhsp->var);
3644 rhsv = get_varinfo (rhsp->var);
3645 if (lhsv->may_have_pointers
3646 && (lhsv->is_full_var
3647 || rhsv->is_full_var
3648 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3649 rhsv->offset + lhsoffset, rhsv->size)))
3650 process_constraint (new_constraint (*lhsp, *rhsp));
3651 if (!rhsv->is_full_var
3652 && (lhsv->is_full_var
3653 || (lhsv->offset + rhsoffset + lhsv->size
3654 > rhsv->offset + lhsoffset + rhsv->size)))
3655 {
3656 ++k;
3657 if (k >= rhsc.length ())
3658 break;
3659 }
3660 else
3661 ++j;
3662 }
3663 }
3664 else
3665 gcc_unreachable ();
3666 }
3667
3668 /* Create constraints ID = { rhsc }. */
3669
3670 static void
3671 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3672 {
3673 struct constraint_expr *c;
3674 struct constraint_expr includes;
3675 unsigned int j;
3676
3677 includes.var = id;
3678 includes.offset = 0;
3679 includes.type = SCALAR;
3680
3681 FOR_EACH_VEC_ELT (rhsc, j, c)
3682 process_constraint (new_constraint (includes, *c));
3683 }
3684
3685 /* Create a constraint ID = OP. */
3686
3687 static void
3688 make_constraint_to (unsigned id, tree op)
3689 {
3690 auto_vec<ce_s> rhsc;
3691 get_constraint_for_rhs (op, &rhsc);
3692 make_constraints_to (id, rhsc);
3693 }
3694
3695 /* Create a constraint ID = &FROM. */
3696
3697 static void
3698 make_constraint_from (varinfo_t vi, int from)
3699 {
3700 struct constraint_expr lhs, rhs;
3701
3702 lhs.var = vi->id;
3703 lhs.offset = 0;
3704 lhs.type = SCALAR;
3705
3706 rhs.var = from;
3707 rhs.offset = 0;
3708 rhs.type = ADDRESSOF;
3709 process_constraint (new_constraint (lhs, rhs));
3710 }
3711
3712 /* Create a constraint ID = FROM. */
3713
3714 static void
3715 make_copy_constraint (varinfo_t vi, int from)
3716 {
3717 struct constraint_expr lhs, rhs;
3718
3719 lhs.var = vi->id;
3720 lhs.offset = 0;
3721 lhs.type = SCALAR;
3722
3723 rhs.var = from;
3724 rhs.offset = 0;
3725 rhs.type = SCALAR;
3726 process_constraint (new_constraint (lhs, rhs));
3727 }
3728
3729 /* Make constraints necessary to make OP escape. */
3730
3731 static void
3732 make_escape_constraint (tree op)
3733 {
3734 make_constraint_to (escaped_id, op);
3735 }
3736
3737 /* Add constraints so that the solution of VI is transitively closed. */
3738
3739 static void
3740 make_transitive_closure_constraints (varinfo_t vi)
3741 {
3742 struct constraint_expr lhs, rhs;
3743
3744 /* VAR = *VAR; */
3745 lhs.type = SCALAR;
3746 lhs.var = vi->id;
3747 lhs.offset = 0;
3748 rhs.type = DEREF;
3749 rhs.var = vi->id;
3750 rhs.offset = UNKNOWN_OFFSET;
3751 process_constraint (new_constraint (lhs, rhs));
3752 }
3753
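/* Reading of the above (illustrative, not from the original
   sources): VAR = *VAR with UNKNOWN_OFFSET keeps folding whatever
   VAR reaches back into VAR. If VAR = {p} and pts(p) = {q}, solving
   adds q, then everything q points to, and so on until a fixpoint;
   VAR's solution becomes closed under dereference. */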
3754 /* Temporary storage for fake var decls. */
3755 struct obstack fake_var_decl_obstack;
3756
3757 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3758
3759 static tree
3760 build_fake_var_decl (tree type)
3761 {
3762 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3763 memset (decl, 0, sizeof (struct tree_var_decl));
3764 TREE_SET_CODE (decl, VAR_DECL);
3765 TREE_TYPE (decl) = type;
3766 DECL_UID (decl) = allocate_decl_uid ();
3767 SET_DECL_PT_UID (decl, -1);
3768 layout_decl (decl, 0);
3769 return decl;
3770 }
3771
3772 /* Create a new artificial heap variable with NAME.
3773 Return the created variable. */
3774
3775 static varinfo_t
3776 make_heapvar (const char *name)
3777 {
3778 varinfo_t vi;
3779 tree heapvar;
3780
3781 heapvar = build_fake_var_decl (ptr_type_node);
3782 DECL_EXTERNAL (heapvar) = 1;
3783
3784 vi = new_var_info (heapvar, name);
3785 vi->is_artificial_var = true;
3786 vi->is_heap_var = true;
3787 vi->is_unknown_size_var = true;
3788 vi->offset = 0;
3789 vi->fullsize = ~0;
3790 vi->size = ~0;
3791 vi->is_full_var = true;
3792 insert_vi_for_tree (heapvar, vi);
3793
3794 return vi;
3795 }
3796
3797 /* Create a new artificial heap variable with NAME and make a
3798 constraint from it to LHS. Set flags according to a tag used
3799 for tracking restrict pointers. */
3800
3801 static varinfo_t
3802 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3803 {
3804 varinfo_t vi = make_heapvar (name);
3805 vi->is_restrict_var = 1;
3806 vi->is_global_var = 1;
3807 vi->may_have_pointers = 1;
3808 make_constraint_from (lhs, vi->id);
3809 return vi;
3810 }
3811
3812 /* Create a new artificial heap variable with NAME and make a
3813 constraint from it to LHS. Set flags according to a tag used
3814 for tracking restrict pointers and make the artificial heap
3815 point to global memory. */
3816
3817 static varinfo_t
3818 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3819 {
3820 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3821 make_copy_constraint (vi, nonlocal_id);
3822 return vi;
3823 }
3824
3825 /* In IPA mode there are varinfos for different aspects of each
3826 function designator. One for the points-to set of the return
3827 value, one for the variables that are clobbered by the function,
3828 one for its uses and one for each parameter (including a single
3829 glob for remaining variadic arguments). */
3830
3831 enum { fi_clobbers = 1, fi_uses = 2,
3832 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3833
3834 /* Get a constraint for the requested part of a function designator FI
3835 when operating in IPA mode. */
3836
3837 static struct constraint_expr
3838 get_function_part_constraint (varinfo_t fi, unsigned part)
3839 {
3840 struct constraint_expr c;
3841
3842 gcc_assert (in_ipa_mode);
3843
3844 if (fi->id == anything_id)
3845 {
3846 /* ??? We probably should have an ANYFN special variable. */
3847 c.var = anything_id;
3848 c.offset = 0;
3849 c.type = SCALAR;
3850 }
3851 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3852 {
3853 varinfo_t ai = first_vi_for_offset (fi, part);
3854 if (ai)
3855 c.var = ai->id;
3856 else
3857 c.var = anything_id;
3858 c.offset = 0;
3859 c.type = SCALAR;
3860 }
3861 else
3862 {
3863 c.var = fi->id;
3864 c.offset = part;
3865 c.type = DEREF;
3866 }
3867
3868 return c;
3869 }
3870
3871 /* For non-IPA mode, generate constraints necessary for a call on the
3872 RHS. */
3873
3874 static void
3875 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3876 {
3877 struct constraint_expr rhsc;
3878 unsigned i;
3879 bool returns_uses = false;
3880
3881 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3882 {
3883 tree arg = gimple_call_arg (stmt, i);
3884 int flags = gimple_call_arg_flags (stmt, i);
3885
3886 /* If the argument is not used we can ignore it. */
3887 if (flags & EAF_UNUSED)
3888 continue;
3889
3890 /* As we compute ESCAPED context-insensitively we do not gain
3891 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3892 set. The argument would still get clobbered through the
3893 escape solution. */
3894 if ((flags & EAF_NOCLOBBER)
3895 && (flags & EAF_NOESCAPE))
3896 {
3897 varinfo_t uses = get_call_use_vi (stmt);
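/* For a non EAF_DIRECT use everything transitively reachable
   from the argument is used, so route it through a temporary
   whose solution is transitively closed. */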
3898 if (!(flags & EAF_DIRECT))
3899 {
3900 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3901 make_constraint_to (tem->id, arg);
3902 make_transitive_closure_constraints (tem);
3903 make_copy_constraint (uses, tem->id);
3904 }
3905 else
3906 make_constraint_to (uses->id, arg);
3907 returns_uses = true;
3908 }
3909 else if (flags & EAF_NOESCAPE)
3910 {
3911 struct constraint_expr lhs, rhs;
3912 varinfo_t uses = get_call_use_vi (stmt);
3913 varinfo_t clobbers = get_call_clobber_vi (stmt);
3914 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3915 make_constraint_to (tem->id, arg);
3916 if (!(flags & EAF_DIRECT))
3917 make_transitive_closure_constraints (tem);
3918 make_copy_constraint (uses, tem->id);
3919 make_copy_constraint (clobbers, tem->id);
3920 /* Add *tem = nonlocal, do not add *tem = callused as
3921 EAF_NOESCAPE parameters do not escape to other parameters
3922 and all other uses appear in NONLOCAL as well. */
3923 lhs.type = DEREF;
3924 lhs.var = tem->id;
3925 lhs.offset = 0;
3926 rhs.type = SCALAR;
3927 rhs.var = nonlocal_id;
3928 rhs.offset = 0;
3929 process_constraint (new_constraint (lhs, rhs));
3930 returns_uses = true;
3931 }
3932 else
3933 make_escape_constraint (arg);
3934 }
3935
3936 /* If we added to the call's uses solution make sure we account
3937 for pointers to it being returned. */
3938 if (returns_uses)
3939 {
3940 rhsc.var = get_call_use_vi (stmt)->id;
3941 rhsc.offset = 0;
3942 rhsc.type = SCALAR;
3943 results->safe_push (rhsc);
3944 }
3945
3946 /* The static chain escapes as well. */
3947 if (gimple_call_chain (stmt))
3948 make_escape_constraint (gimple_call_chain (stmt));
3949
3950 /* And if we applied NRV the address of the return slot escapes as well. */
3951 if (gimple_call_return_slot_opt_p (stmt)
3952 && gimple_call_lhs (stmt) != NULL_TREE
3953 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3954 {
3955 auto_vec<ce_s> tmpc;
3956 struct constraint_expr lhsc, *c;
3957 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3958 lhsc.var = escaped_id;
3959 lhsc.offset = 0;
3960 lhsc.type = SCALAR;
3961 FOR_EACH_VEC_ELT (tmpc, i, c)
3962 process_constraint (new_constraint (lhsc, *c));
3963 }
3964
3965 /* Regular functions return nonlocal memory. */
3966 rhsc.var = nonlocal_id;
3967 rhsc.offset = 0;
3968 rhsc.type = SCALAR;
3969 results->safe_push (rhsc);
3970 }
3971
3972 /* For non-IPA mode, generate constraints necessary for a call
3973 that returns a pointer and assigns it to LHS. This simply makes
3974 the LHS point to global and escaped variables. */
3975
3976 static void
3977 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
3978 tree fndecl)
3979 {
3980 auto_vec<ce_s> lhsc;
3981
3982 get_constraint_for (lhs, &lhsc);
3983 /* If the store is to a global decl make sure to
3984 add proper escape constraints. */
3985 lhs = get_base_address (lhs);
3986 if (lhs
3987 && DECL_P (lhs)
3988 && is_global_var (lhs))
3989 {
3990 struct constraint_expr tmpc;
3991 tmpc.var = escaped_id;
3992 tmpc.offset = 0;
3993 tmpc.type = SCALAR;
3994 lhsc.safe_push (tmpc);
3995 }
3996
3997 /* If the call returns an argument unmodified, override the rhs
3998 constraints. */
3999 if (flags & ERF_RETURNS_ARG
4000 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4001 {
4002 tree arg;
4003 rhsc.create (0);
4004 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4005 get_constraint_for (arg, &rhsc);
4006 process_all_all_constraints (lhsc, rhsc);
4007 rhsc.release ();
4008 }
4009 else if (flags & ERF_NOALIAS)
4010 {
4011 varinfo_t vi;
4012 struct constraint_expr tmpc;
4013 rhsc.create (0);
4014 vi = make_heapvar ("HEAP");
4015 /* We are marking allocated storage local; we deal with it becoming
4016 global by escaping and by setting vars_contains_escaped_heap. */
4017 DECL_EXTERNAL (vi->decl) = 0;
4018 vi->is_global_var = 0;
4019 /* If this is not a real malloc call, assume the memory was
4020 initialized and thus may point to global memory. All
4021 builtin functions with the malloc attribute behave in a sane way. */
4022 if (!fndecl
4023 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4024 make_constraint_from (vi, nonlocal_id);
4025 tmpc.var = vi->id;
4026 tmpc.offset = 0;
4027 tmpc.type = ADDRESSOF;
4028 rhsc.safe_push (tmpc);
4029 process_all_all_constraints (lhsc, rhsc);
4030 rhsc.release ();
4031 }
4032 else
4033 process_all_all_constraints (lhsc, rhsc);
4034 }
4035
4036 /* For non-IPA mode, generate constraints necessary for a call of a
4037 const function that returns a pointer in the statement STMT. */
4038
4039 static void
4040 handle_const_call (gcall *stmt, vec<ce_s> *results)
4041 {
4042 struct constraint_expr rhsc;
4043 unsigned int k;
4044
4045 /* Treat nested const functions the same as pure functions as far
4046 as the static chain is concerned. */
4047 if (gimple_call_chain (stmt))
4048 {
4049 varinfo_t uses = get_call_use_vi (stmt);
4050 make_transitive_closure_constraints (uses);
4051 make_constraint_to (uses->id, gimple_call_chain (stmt));
4052 rhsc.var = uses->id;
4053 rhsc.offset = 0;
4054 rhsc.type = SCALAR;
4055 results->safe_push (rhsc);
4056 }
4057
4058 /* May return arguments. */
4059 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4060 {
4061 tree arg = gimple_call_arg (stmt, k);
4062 auto_vec<ce_s> argc;
4063 unsigned i;
4064 struct constraint_expr *argp;
4065 get_constraint_for_rhs (arg, &argc);
4066 FOR_EACH_VEC_ELT (argc, i, argp)
4067 results->safe_push (*argp);
4068 }
4069
4070 /* May return addresses of globals. */
4071 rhsc.var = nonlocal_id;
4072 rhsc.offset = 0;
4073 rhsc.type = ADDRESSOF;
4074 results->safe_push (rhsc);
4075 }
4076
4077 /* For non-IPA mode, generate constraints necessary for a call to a
4078 pure function in statement STMT. */
4079
4080 static void
4081 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4082 {
4083 struct constraint_expr rhsc;
4084 unsigned i;
4085 varinfo_t uses = NULL;
4086
4087 /* Memory reached from pointer arguments is call-used. */
4088 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4089 {
4090 tree arg = gimple_call_arg (stmt, i);
4091 if (!uses)
4092 {
4093 uses = get_call_use_vi (stmt);
4094 make_transitive_closure_constraints (uses);
4095 }
4096 make_constraint_to (uses->id, arg);
4097 }
4098
4099 /* The static chain is used as well. */
4100 if (gimple_call_chain (stmt))
4101 {
4102 if (!uses)
4103 {
4104 uses = get_call_use_vi (stmt);
4105 make_transitive_closure_constraints (uses);
4106 }
4107 make_constraint_to (uses->id, gimple_call_chain (stmt));
4108 }
4109
4110 /* Pure functions may return call-used and nonlocal memory. */
4111 if (uses)
4112 {
4113 rhsc.var = uses->id;
4114 rhsc.offset = 0;
4115 rhsc.type = SCALAR;
4116 results->safe_push (rhsc);
4117 }
4118 rhsc.var = nonlocal_id;
4119 rhsc.offset = 0;
4120 rhsc.type = SCALAR;
4121 results->safe_push (rhsc);
4122 }
4123
4124
4125 /* Return the varinfo for the callee of CALL. */
4126
4127 static varinfo_t
4128 get_fi_for_callee (gcall *call)
4129 {
4130 tree decl, fn = gimple_call_fn (call);
4131
4132 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4133 fn = OBJ_TYPE_REF_EXPR (fn);
4134
4135 /* If we can directly resolve the function being called, do so.
4136 Otherwise, it must be some sort of indirect expression that
4137 we should still be able to handle. */
4138 decl = gimple_call_addr_fndecl (fn);
4139 if (decl)
4140 return get_vi_for_tree (decl);
4141
4142 /* If the function is anything other than an SSA name pointer we have no
4143 clue and should be getting ANYFN (well, ANYTHING for now). */
4144 if (!fn || TREE_CODE (fn) != SSA_NAME)
4145 return get_varinfo (anything_id);
4146
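/* Parameters and the result have their constraints registered
   on the underlying decls, so use those for default definitions. */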
4147 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4148 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4149 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4150 fn = SSA_NAME_VAR (fn);
4151
4152 return get_vi_for_tree (fn);
4153 }
4154
4155 /* Create constraints for the builtin call T. Return true if the call
4156 was handled, otherwise false. */
4157
4158 static bool
4159 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4160 {
4161 tree fndecl = gimple_call_fndecl (t);
4162 auto_vec<ce_s, 2> lhsc;
4163 auto_vec<ce_s, 4> rhsc;
4164 varinfo_t fi;
4165
4166 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4167 /* ??? All builtins that are handled here need to be handled
4168 in the alias-oracle query functions explicitly! */
4169 switch (DECL_FUNCTION_CODE (fndecl))
4170 {
4171 /* All the following functions return a pointer to the same object
4172 as their first argument points to. The functions do not add
4173 to the ESCAPED solution. The functions make the memory pointed
4174 to by the first argument point to what the memory pointed to
4175 by the second argument points to. */
4176 case BUILT_IN_STRCPY:
4177 case BUILT_IN_STRNCPY:
4178 case BUILT_IN_BCOPY:
4179 case BUILT_IN_MEMCPY:
4180 case BUILT_IN_MEMMOVE:
4181 case BUILT_IN_MEMPCPY:
4182 case BUILT_IN_STPCPY:
4183 case BUILT_IN_STPNCPY:
4184 case BUILT_IN_STRCAT:
4185 case BUILT_IN_STRNCAT:
4186 case BUILT_IN_STRCPY_CHK:
4187 case BUILT_IN_STRNCPY_CHK:
4188 case BUILT_IN_MEMCPY_CHK:
4189 case BUILT_IN_MEMMOVE_CHK:
4190 case BUILT_IN_MEMPCPY_CHK:
4191 case BUILT_IN_STPCPY_CHK:
4192 case BUILT_IN_STPNCPY_CHK:
4193 case BUILT_IN_STRCAT_CHK:
4194 case BUILT_IN_STRNCAT_CHK:
4195 case BUILT_IN_TM_MEMCPY:
4196 case BUILT_IN_TM_MEMMOVE:
4197 {
4198 tree res = gimple_call_lhs (t);
4199 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4200 == BUILT_IN_BCOPY ? 1 : 0));
4201 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4202 == BUILT_IN_BCOPY ? 0 : 1));
4203 if (res != NULL_TREE)
4204 {
4205 get_constraint_for (res, &lhsc);
4206 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4207 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4208 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4209 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4210 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4211 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4212 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4213 else
4214 get_constraint_for (dest, &rhsc);
4215 process_all_all_constraints (lhsc, rhsc);
4216 lhsc.truncate (0);
4217 rhsc.truncate (0);
4218 }
4219 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4220 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4221 do_deref (&lhsc);
4222 do_deref (&rhsc);
4223 process_all_all_constraints (lhsc, rhsc);
4224 return true;
4225 }
4226 case BUILT_IN_MEMSET:
4227 case BUILT_IN_MEMSET_CHK:
4228 case BUILT_IN_TM_MEMSET:
4229 {
4230 tree res = gimple_call_lhs (t);
4231 tree dest = gimple_call_arg (t, 0);
4232 unsigned i;
4233 ce_s *lhsp;
4234 struct constraint_expr ac;
4235 if (res != NULL_TREE)
4236 {
4237 get_constraint_for (res, &lhsc);
4238 get_constraint_for (dest, &rhsc);
4239 process_all_all_constraints (lhsc, rhsc);
4240 lhsc.truncate (0);
4241 }
4242 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4243 do_deref (&lhsc);
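/* A zero memset stores NULL pointers, which point to nothing
   when NULL dereferences can be assumed undefined; any other
   value is treated as an unknown integer. */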
4244 if (flag_delete_null_pointer_checks
4245 && integer_zerop (gimple_call_arg (t, 1)))
4246 {
4247 ac.type = ADDRESSOF;
4248 ac.var = nothing_id;
4249 }
4250 else
4251 {
4252 ac.type = SCALAR;
4253 ac.var = integer_id;
4254 }
4255 ac.offset = 0;
4256 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4257 process_constraint (new_constraint (*lhsp, ac));
4258 return true;
4259 }
4260 case BUILT_IN_POSIX_MEMALIGN:
4261 {
4262 tree ptrptr = gimple_call_arg (t, 0);
4263 get_constraint_for (ptrptr, &lhsc);
4264 do_deref (&lhsc);
4265 varinfo_t vi = make_heapvar ("HEAP");
4266 /* We are marking allocated storage local; we deal with it becoming
4267 global by escaping and by setting vars_contains_escaped_heap. */
4268 DECL_EXTERNAL (vi->decl) = 0;
4269 vi->is_global_var = 0;
4270 struct constraint_expr tmpc;
4271 tmpc.var = vi->id;
4272 tmpc.offset = 0;
4273 tmpc.type = ADDRESSOF;
4274 rhsc.safe_push (tmpc);
4275 process_all_all_constraints (lhsc, rhsc);
4276 return true;
4277 }
4278 case BUILT_IN_ASSUME_ALIGNED:
4279 {
4280 tree res = gimple_call_lhs (t);
4281 tree dest = gimple_call_arg (t, 0);
4282 if (res != NULL_TREE)
4283 {
4284 get_constraint_for (res, &lhsc);
4285 get_constraint_for (dest, &rhsc);
4286 process_all_all_constraints (lhsc, rhsc);
4287 }
4288 return true;
4289 }
4290 /* All the following functions do not return pointers, do not
4291 modify the points-to sets of memory reachable from their
4292 arguments and do not add to the ESCAPED solution. */
4293 case BUILT_IN_SINCOS:
4294 case BUILT_IN_SINCOSF:
4295 case BUILT_IN_SINCOSL:
4296 case BUILT_IN_FREXP:
4297 case BUILT_IN_FREXPF:
4298 case BUILT_IN_FREXPL:
4299 case BUILT_IN_GAMMA_R:
4300 case BUILT_IN_GAMMAF_R:
4301 case BUILT_IN_GAMMAL_R:
4302 case BUILT_IN_LGAMMA_R:
4303 case BUILT_IN_LGAMMAF_R:
4304 case BUILT_IN_LGAMMAL_R:
4305 case BUILT_IN_MODF:
4306 case BUILT_IN_MODFF:
4307 case BUILT_IN_MODFL:
4308 case BUILT_IN_REMQUO:
4309 case BUILT_IN_REMQUOF:
4310 case BUILT_IN_REMQUOL:
4311 case BUILT_IN_FREE:
4312 return true;
4313 case BUILT_IN_STRDUP:
4314 case BUILT_IN_STRNDUP:
4315 case BUILT_IN_REALLOC:
4316 if (gimple_call_lhs (t))
4317 {
4318 handle_lhs_call (t, gimple_call_lhs (t),
4319 gimple_call_return_flags (t) | ERF_NOALIAS,
4320 vNULL, fndecl);
4321 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4322 NULL_TREE, &lhsc);
4323 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4324 NULL_TREE, &rhsc);
4325 do_deref (&lhsc);
4326 do_deref (&rhsc);
4327 process_all_all_constraints (lhsc, rhsc);
4328 lhsc.truncate (0);
4329 rhsc.truncate (0);
4330 /* For realloc the resulting pointer can be equal to the
4331 argument as well. But only doing this wouldn't be
4332 correct because with ptr == 0 realloc behaves like malloc. */
4333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4334 {
4335 get_constraint_for (gimple_call_lhs (t), &lhsc);
4336 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4337 process_all_all_constraints (lhsc, rhsc);
4338 }
4339 return true;
4340 }
4341 break;
4342 /* String / character search functions return a pointer into the
4343 source string or NULL. */
4344 case BUILT_IN_INDEX:
4345 case BUILT_IN_STRCHR:
4346 case BUILT_IN_STRRCHR:
4347 case BUILT_IN_MEMCHR:
4348 case BUILT_IN_STRSTR:
4349 case BUILT_IN_STRPBRK:
4350 if (gimple_call_lhs (t))
4351 {
4352 tree src = gimple_call_arg (t, 0);
4353 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4354 constraint_expr nul;
4355 nul.var = nothing_id;
4356 nul.offset = 0;
4357 nul.type = ADDRESSOF;
4358 rhsc.safe_push (nul);
4359 get_constraint_for (gimple_call_lhs (t), &lhsc);
4360 process_all_all_constraints (lhsc, rhsc);
4361 }
4362 return true;
4363 /* Trampolines are special - they set up passing the static
4364 frame. */
4365 case BUILT_IN_INIT_TRAMPOLINE:
4366 {
4367 tree tramp = gimple_call_arg (t, 0);
4368 tree nfunc = gimple_call_arg (t, 1);
4369 tree frame = gimple_call_arg (t, 2);
4370 unsigned i;
4371 struct constraint_expr lhs, *rhsp;
4372 if (in_ipa_mode)
4373 {
4374 varinfo_t nfi = NULL;
4375 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4376 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4377 if (nfi)
4378 {
4379 lhs = get_function_part_constraint (nfi, fi_static_chain);
4380 get_constraint_for (frame, &rhsc);
4381 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4382 process_constraint (new_constraint (lhs, *rhsp));
4383 rhsc.truncate (0);
4384
4385 /* Make the frame point to the function for
4386 the trampoline adjustment call. */
4387 get_constraint_for (tramp, &lhsc);
4388 do_deref (&lhsc);
4389 get_constraint_for (nfunc, &rhsc);
4390 process_all_all_constraints (lhsc, rhsc);
4391
4392 return true;
4393 }
4394 }
4395 /* Else fallthru to generic handling which will let
4396 the frame escape. */
4397 break;
4398 }
4399 case BUILT_IN_ADJUST_TRAMPOLINE:
4400 {
4401 tree tramp = gimple_call_arg (t, 0);
4402 tree res = gimple_call_lhs (t);
4403 if (in_ipa_mode && res)
4404 {
4405 get_constraint_for (res, &lhsc);
4406 get_constraint_for (tramp, &rhsc);
4407 do_deref (&rhsc);
4408 process_all_all_constraints (lhsc, rhsc);
4409 }
4410 return true;
4411 }
4412 CASE_BUILT_IN_TM_STORE (1):
4413 CASE_BUILT_IN_TM_STORE (2):
4414 CASE_BUILT_IN_TM_STORE (4):
4415 CASE_BUILT_IN_TM_STORE (8):
4416 CASE_BUILT_IN_TM_STORE (FLOAT):
4417 CASE_BUILT_IN_TM_STORE (DOUBLE):
4418 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4419 CASE_BUILT_IN_TM_STORE (M64):
4420 CASE_BUILT_IN_TM_STORE (M128):
4421 CASE_BUILT_IN_TM_STORE (M256):
4422 {
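/* Model the transactional store as a plain *addr = src. */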
4423 tree addr = gimple_call_arg (t, 0);
4424 tree src = gimple_call_arg (t, 1);
4425
4426 get_constraint_for (addr, &lhsc);
4427 do_deref (&lhsc);
4428 get_constraint_for (src, &rhsc);
4429 process_all_all_constraints (lhsc, rhsc);
4430 return true;
4431 }
4432 CASE_BUILT_IN_TM_LOAD (1):
4433 CASE_BUILT_IN_TM_LOAD (2):
4434 CASE_BUILT_IN_TM_LOAD (4):
4435 CASE_BUILT_IN_TM_LOAD (8):
4436 CASE_BUILT_IN_TM_LOAD (FLOAT):
4437 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4438 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4439 CASE_BUILT_IN_TM_LOAD (M64):
4440 CASE_BUILT_IN_TM_LOAD (M128):
4441 CASE_BUILT_IN_TM_LOAD (M256):
4442 {
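/* Model the transactional load as a plain dest = *addr. */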
4443 tree dest = gimple_call_lhs (t);
4444 tree addr = gimple_call_arg (t, 0);
4445
4446 get_constraint_for (dest, &lhsc);
4447 get_constraint_for (addr, &rhsc);
4448 do_deref (&rhsc);
4449 process_all_all_constraints (lhsc, rhsc);
4450 return true;
4451 }
4452 /* Variadic arguments need to be handled in IPA mode
4453 as well. */
4454 case BUILT_IN_VA_START:
4455 {
4456 tree valist = gimple_call_arg (t, 0);
4457 struct constraint_expr rhs, *lhsp;
4458 unsigned i;
4459 get_constraint_for (valist, &lhsc);
4460 do_deref (&lhsc);
4461 /* The va_list gets access to pointers in variadic
4462 arguments, which we know in the case of IPA analysis
4463 and which otherwise are just all nonlocal variables. */
4464 if (in_ipa_mode)
4465 {
4466 fi = lookup_vi_for_tree (fn->decl);
4467 rhs = get_function_part_constraint (fi, ~0);
4468 rhs.type = ADDRESSOF;
4469 }
4470 else
4471 {
4472 rhs.var = nonlocal_id;
4473 rhs.type = ADDRESSOF;
4474 rhs.offset = 0;
4475 }
4476 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4477 process_constraint (new_constraint (*lhsp, rhs));
4478 /* va_list is clobbered. */
4479 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4480 return true;
4481 }
4482 /* va_end doesn't have any effect that matters. */
4483 case BUILT_IN_VA_END:
4484 return true;
4485 /* Alternate return. Simply give up for now. */
4486 case BUILT_IN_RETURN:
4487 {
4488 fi = NULL;
4489 if (!in_ipa_mode
4490 || !(fi = get_vi_for_tree (fn->decl)))
4491 make_constraint_from (get_varinfo (escaped_id), anything_id);
4492 else if (in_ipa_mode
4493 && fi != NULL)
4494 {
4495 struct constraint_expr lhs, rhs;
4496 lhs = get_function_part_constraint (fi, fi_result);
4497 rhs.var = anything_id;
4498 rhs.offset = 0;
4499 rhs.type = SCALAR;
4500 process_constraint (new_constraint (lhs, rhs));
4501 }
4502 return true;
4503 }
4504 /* printf-style functions may have hooks to set pointers to
4505 point to somewhere into the generated string. Leave them
4506 for a later exercise... */
4507 default:
4508 /* Fallthru to general call handling. */;
4509 }
4510
4511 return false;
4512 }
4513
4514 /* Create constraints for the call T. */
4515
4516 static void
4517 find_func_aliases_for_call (struct function *fn, gcall *t)
4518 {
4519 tree fndecl = gimple_call_fndecl (t);
4520 varinfo_t fi;
4521
4522 if (fndecl != NULL_TREE
4523 && DECL_BUILT_IN (fndecl)
4524 && find_func_aliases_for_builtin_call (fn, t))
4525 return;
4526
4527 fi = get_fi_for_callee (t);
4528 if (!in_ipa_mode
4529 || (fndecl && !fi->is_fn_info))
4530 {
4531 auto_vec<ce_s, 16> rhsc;
4532 int flags = gimple_call_flags (t);
4533
4534 /* Const functions can return their arguments and addresses
4535 of global memory but not of escaped memory. */
4536 if (flags & (ECF_CONST|ECF_NOVOPS))
4537 {
4538 if (gimple_call_lhs (t))
4539 handle_const_call (t, &rhsc);
4540 }
4541 /* Pure functions can return addresses in and of memory
4542 reachable from their arguments, but they are not an escape
4543 point for reachable memory of their arguments. */
4544 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4545 handle_pure_call (t, &rhsc);
4546 else
4547 handle_rhs_call (t, &rhsc);
4548 if (gimple_call_lhs (t))
4549 handle_lhs_call (t, gimple_call_lhs (t),
4550 gimple_call_return_flags (t), rhsc, fndecl);
4551 }
4552 else
4553 {
4554 auto_vec<ce_s, 2> rhsc;
4555 tree lhsop;
4556 unsigned j;
4557
4558 /* Assign all the passed arguments to the appropriate incoming
4559 parameters of the function. */
4560 for (j = 0; j < gimple_call_num_args (t); j++)
4561 {
4562 struct constraint_expr lhs;
4563 struct constraint_expr *rhsp;
4564 tree arg = gimple_call_arg (t, j);
4565
4566 get_constraint_for_rhs (arg, &rhsc);
4567 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4568 while (rhsc.length () != 0)
4569 {
4570 rhsp = &rhsc.last ();
4571 process_constraint (new_constraint (lhs, *rhsp));
4572 rhsc.pop ();
4573 }
4574 }
4575
4576 /* If we are returning a value, assign it to the result. */
4577 lhsop = gimple_call_lhs (t);
4578 if (lhsop)
4579 {
4580 auto_vec<ce_s, 2> lhsc;
4581 struct constraint_expr rhs;
4582 struct constraint_expr *lhsp;
4583
4584 get_constraint_for (lhsop, &lhsc);
4585 rhs = get_function_part_constraint (fi, fi_result);
4586 if (fndecl
4587 && DECL_RESULT (fndecl)
4588 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4589 {
4590 auto_vec<ce_s, 2> tem;
4591 tem.quick_push (rhs);
4592 do_deref (&tem);
4593 gcc_checking_assert (tem.length () == 1);
4594 rhs = tem[0];
4595 }
4596 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4597 process_constraint (new_constraint (*lhsp, rhs));
4598 }
4599
4600 /* If we pass the result decl by reference, honor that. */
4601 if (lhsop
4602 && fndecl
4603 && DECL_RESULT (fndecl)
4604 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4605 {
4606 struct constraint_expr lhs;
4607 struct constraint_expr *rhsp;
4608
4609 get_constraint_for_address_of (lhsop, &rhsc);
4610 lhs = get_function_part_constraint (fi, fi_result);
4611 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4612 process_constraint (new_constraint (lhs, *rhsp));
4613 rhsc.truncate (0);
4614 }
4615
4616 /* If we use a static chain, pass it along. */
4617 if (gimple_call_chain (t))
4618 {
4619 struct constraint_expr lhs;
4620 struct constraint_expr *rhsp;
4621
4622 get_constraint_for (gimple_call_chain (t), &rhsc);
4623 lhs = get_function_part_constraint (fi, fi_static_chain);
4624 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4625 process_constraint (new_constraint (lhs, *rhsp));
4626 }
4627 }
4628 }
4629
4630 /* Walk statement T setting up aliasing constraints according to the
4631 references found in T. This function is the main part of the
4632 constraint builder. FN points to the function that contains
4633 the statement T. */
4634
4635 static void
4636 find_func_aliases (struct function *fn, gimple origt)
4637 {
4638 gimple t = origt;
4639 auto_vec<ce_s, 16> lhsc;
4640 auto_vec<ce_s, 16> rhsc;
4641 struct constraint_expr *c;
4642 varinfo_t fi;
4643
4644 /* Now build constraints expressions. */
4645 if (gimple_code (t) == GIMPLE_PHI)
4646 {
4647 size_t i;
4648 unsigned int j;
4649
4650 /* For a phi node, assign all the arguments to
4651 the result. */
4652 get_constraint_for (gimple_phi_result (t), &lhsc);
4653 for (i = 0; i < gimple_phi_num_args (t); i++)
4654 {
4655 tree strippedrhs = PHI_ARG_DEF (t, i);
4656
4657 STRIP_NOPS (strippedrhs);
4658 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4659
4660 FOR_EACH_VEC_ELT (lhsc, j, c)
4661 {
4662 struct constraint_expr *c2;
4663 while (rhsc.length () > 0)
4664 {
4665 c2 = &rhsc.last ();
4666 process_constraint (new_constraint (*c, *c2));
4667 rhsc.pop ();
4668 }
4669 }
4670 }
4671 }
4672 /* In IPA mode, we need to generate constraints to pass call
4673 arguments through their calls. There are two cases,
4674 either a GIMPLE_CALL returning a value, or just a plain
4675 GIMPLE_CALL when no value is returned.
4676
4677 In non-ipa mode, we need to generate constraints for each
4678 pointer passed by address. */
4679 else if (is_gimple_call (t))
4680 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4681
4682 /* Otherwise, just a regular assignment statement. Only care about
4683 operations with pointer result, others are dealt with as escape
4684 points if they have pointer operands. */
4685 else if (is_gimple_assign (t))
4686 {
4687 /* Extract the LHS and the single RHS operand, if any. */
4688 tree lhsop = gimple_assign_lhs (t);
4689 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4690
4691 if (rhsop && TREE_CLOBBER_P (rhsop))
4692 /* Ignore clobbers, they don't actually store anything into
4693 the LHS. */
4694 ;
4695 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4696 do_structure_copy (lhsop, rhsop);
4697 else
4698 {
4699 enum tree_code code = gimple_assign_rhs_code (t);
4700
4701 get_constraint_for (lhsop, &lhsc);
4702
4703 if (code == POINTER_PLUS_EXPR)
4704 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4705 gimple_assign_rhs2 (t), &rhsc);
4706 else if (code == BIT_AND_EXPR
4707 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4708 {
4709 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4710 the pointer. Handle it by offsetting it by UNKNOWN. */
4711 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4712 NULL_TREE, &rhsc);
4713 }
4714 else if ((CONVERT_EXPR_CODE_P (code)
4715 && !(POINTER_TYPE_P (gimple_expr_type (t))
4716 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4717 || gimple_assign_single_p (t))
4718 get_constraint_for_rhs (rhsop, &rhsc);
4719 else if (code == COND_EXPR)
4720 {
4721 /* The result is a merge of both COND_EXPR arms. */
4722 auto_vec<ce_s, 2> tmp;
4723 struct constraint_expr *rhsp;
4724 unsigned i;
4725 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4726 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4727 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4728 rhsc.safe_push (*rhsp);
4729 }
4730 else if (truth_value_p (code))
4731 /* Truth value results are not pointers (or parts thereof), or
4732 at least would be a very unreasonable obfuscation of one. */
4733 ;
4734 else
4735 {
4736 /* All other operations are merges. */
4737 auto_vec<ce_s, 4> tmp;
4738 struct constraint_expr *rhsp;
4739 unsigned i, j;
4740 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4741 for (i = 2; i < gimple_num_ops (t); ++i)
4742 {
4743 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4744 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4745 rhsc.safe_push (*rhsp);
4746 tmp.truncate (0);
4747 }
4748 }
4749 process_all_all_constraints (lhsc, rhsc);
4750 }
4751 /* If there is a store to a global variable the rhs escapes. */
4752 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4753 && DECL_P (lhsop)
4754 && is_global_var (lhsop)
4755 && (!in_ipa_mode
4756 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4757 make_escape_constraint (rhsop);
4758 }
4759 /* Handle escapes through return. */
4760 else if (gimple_code (t) == GIMPLE_RETURN
4761 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4762 {
4763 greturn *return_stmt = as_a <greturn *> (t);
4764 fi = NULL;
4765 if (!in_ipa_mode
4766 || !(fi = get_vi_for_tree (fn->decl)))
4767 make_escape_constraint (gimple_return_retval (return_stmt));
4768 else if (in_ipa_mode
4769 && fi != NULL)
4770 {
4771 struct constraint_expr lhs;
4772 struct constraint_expr *rhsp;
4773 unsigned i;
4774
4775 lhs = get_function_part_constraint (fi, fi_result);
4776 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4777 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4778 process_constraint (new_constraint (lhs, *rhsp));
4779 }
4780 }
4781 /* Handle asms conservatively by adding escape constraints to everything. */
4782 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4783 {
4784 unsigned i, noutputs;
4785 const char **oconstraints;
4786 const char *constraint;
4787 bool allows_mem, allows_reg, is_inout;
4788
4789 noutputs = gimple_asm_noutputs (asm_stmt);
4790 oconstraints = XALLOCAVEC (const char *, noutputs);
4791
4792 for (i = 0; i < noutputs; ++i)
4793 {
4794 tree link = gimple_asm_output_op (asm_stmt, i);
4795 tree op = TREE_VALUE (link);
4796
4797 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4798 oconstraints[i] = constraint;
4799 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4800 &allows_reg, &is_inout);
4801
4802 /* A memory constraint makes the address of the operand escape. */
4803 if (!allows_reg && allows_mem)
4804 make_escape_constraint (build_fold_addr_expr (op));
4805
4806 /* The asm may read global memory, so outputs may point to
4807 any global memory. */
4808 if (op)
4809 {
4810 auto_vec<ce_s, 2> lhsc;
4811 struct constraint_expr rhsc, *lhsp;
4812 unsigned j;
4813 get_constraint_for (op, &lhsc);
4814 rhsc.var = nonlocal_id;
4815 rhsc.offset = 0;
4816 rhsc.type = SCALAR;
4817 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4818 process_constraint (new_constraint (*lhsp, rhsc));
4819 }
4820 }
4821 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4822 {
4823 tree link = gimple_asm_input_op (asm_stmt, i);
4824 tree op = TREE_VALUE (link);
4825
4826 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4827
4828 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4829 &allows_mem, &allows_reg);
4830
4831 /* A memory constraint makes the address of the operand escape. */
4832 if (!allows_reg && allows_mem)
4833 make_escape_constraint (build_fold_addr_expr (op));
4834 /* Strictly we'd only need the constraint to ESCAPED if
4835 the asm clobbers memory, otherwise using something
4836 along the lines of per-call clobbers/uses would be enough. */
4837 else if (op)
4838 make_escape_constraint (op);
4839 }
4840 }
4841 }
4842
4843
4844 /* Create a constraint adding to the clobber set of FI the memory
4845 pointed to by PTR. */
4846
4847 static void
4848 process_ipa_clobber (varinfo_t fi, tree ptr)
4849 {
4850 vec<ce_s> ptrc = vNULL;
4851 struct constraint_expr *c, lhs;
4852 unsigned i;
4853 get_constraint_for_rhs (ptr, &ptrc);
4854 lhs = get_function_part_constraint (fi, fi_clobbers);
4855 FOR_EACH_VEC_ELT (ptrc, i, c)
4856 process_constraint (new_constraint (lhs, *c));
4857 ptrc.release ();
4858 }
4859
4860 /* Walk statement T setting up clobber and use constraints according to the
4861 references found in T. This function is a main part of the
4862 IPA constraint builder. */
4863
4864 static void
4865 find_func_clobbers (struct function *fn, gimple origt)
4866 {
4867 gimple t = origt;
4868 auto_vec<ce_s, 16> lhsc;
4869 auto_vec<ce_s, 16> rhsc;
4870 varinfo_t fi;
4871
4872 /* Add constraints for clobbered/used in IPA mode.
4873 We are not interested in what automatic variables are clobbered
4874 or used, as we only use the information in callers, to which
4875 they do not escape. */
4876 gcc_assert (in_ipa_mode);
4877
4878 /* If the stmt refers to memory in any way it had better have a VUSE. */
4879 if (gimple_vuse (t) == NULL_TREE)
4880 return;
4881
4882 /* We'd better have function information for the current function. */
4883 fi = lookup_vi_for_tree (fn->decl);
4884 gcc_assert (fi != NULL);
4885
4886 /* Account for stores in assignments and calls. */
4887 if (gimple_vdef (t) != NULL_TREE
4888 && gimple_has_lhs (t))
4889 {
4890 tree lhs = gimple_get_lhs (t);
4891 tree tem = lhs;
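/* Strip handled components to get at the base of the access. */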
4892 while (handled_component_p (tem))
4893 tem = TREE_OPERAND (tem, 0);
4894 if ((DECL_P (tem)
4895 && !auto_var_in_fn_p (tem, fn->decl))
4896 || INDIRECT_REF_P (tem)
4897 || (TREE_CODE (tem) == MEM_REF
4898 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4899 && auto_var_in_fn_p
4900 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4901 {
4902 struct constraint_expr lhsc, *rhsp;
4903 unsigned i;
4904 lhsc = get_function_part_constraint (fi, fi_clobbers);
4905 get_constraint_for_address_of (lhs, &rhsc);
4906 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4907 process_constraint (new_constraint (lhsc, *rhsp));
4908 rhsc.truncate (0);
4909 }
4910 }
4911
4912 /* Account for uses in assignments and returns. */
4913 if (gimple_assign_single_p (t)
4914 || (gimple_code (t) == GIMPLE_RETURN
4915 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
4916 {
4917 tree rhs = (gimple_assign_single_p (t)
4918 ? gimple_assign_rhs1 (t)
4919 : gimple_return_retval (as_a <greturn *> (t)));
4920 tree tem = rhs;
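/* Again strip handled components to get at the base of the
   access. */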
4921 while (handled_component_p (tem))
4922 tem = TREE_OPERAND (tem, 0);
4923 if ((DECL_P (tem)
4924 && !auto_var_in_fn_p (tem, fn->decl))
4925 || INDIRECT_REF_P (tem)
4926 || (TREE_CODE (tem) == MEM_REF
4927 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4928 && auto_var_in_fn_p
4929 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4930 {
4931 struct constraint_expr lhs, *rhsp;
4932 unsigned i;
4933 lhs = get_function_part_constraint (fi, fi_uses);
4934 get_constraint_for_address_of (rhs, &rhsc);
4935 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4936 process_constraint (new_constraint (lhs, *rhsp));
4937 rhsc.truncate (0);
4938 }
4939 }
4940
4941 if (gcall *call_stmt = dyn_cast <gcall *> (t))
4942 {
4943 varinfo_t cfi = NULL;
4944 tree decl = gimple_call_fndecl (t);
4945 struct constraint_expr lhs, rhs;
4946 unsigned i, j;
4947
4948 /* For builtins we do not have separate function info. For those
4949 we do not generate escapes for, we have to generate clobbers/uses. */
4950 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4951 switch (DECL_FUNCTION_CODE (decl))
4952 {
4953 /* The following functions use and clobber memory pointed to
4954 by their arguments. */
4955 case BUILT_IN_STRCPY:
4956 case BUILT_IN_STRNCPY:
4957 case BUILT_IN_BCOPY:
4958 case BUILT_IN_MEMCPY:
4959 case BUILT_IN_MEMMOVE:
4960 case BUILT_IN_MEMPCPY:
4961 case BUILT_IN_STPCPY:
4962 case BUILT_IN_STPNCPY:
4963 case BUILT_IN_STRCAT:
4964 case BUILT_IN_STRNCAT:
4965 case BUILT_IN_STRCPY_CHK:
4966 case BUILT_IN_STRNCPY_CHK:
4967 case BUILT_IN_MEMCPY_CHK:
4968 case BUILT_IN_MEMMOVE_CHK:
4969 case BUILT_IN_MEMPCPY_CHK:
4970 case BUILT_IN_STPCPY_CHK:
4971 case BUILT_IN_STPNCPY_CHK:
4972 case BUILT_IN_STRCAT_CHK:
4973 case BUILT_IN_STRNCAT_CHK:
4974 {
4975 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4976 == BUILT_IN_BCOPY ? 1 : 0));
4977 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4978 == BUILT_IN_BCOPY ? 0 : 1));
4979 unsigned i;
4980 struct constraint_expr *rhsp, *lhsp;
4981 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4982 lhs = get_function_part_constraint (fi, fi_clobbers);
4983 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4984 process_constraint (new_constraint (lhs, *lhsp));
4985 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4986 lhs = get_function_part_constraint (fi, fi_uses);
4987 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4988 process_constraint (new_constraint (lhs, *rhsp));
4989 return;
4990 }
4991 /* The following functions clobber memory pointed to by
4992 their first argument. */
4993 case BUILT_IN_MEMSET:
4994 case BUILT_IN_MEMSET_CHK:
4995 case BUILT_IN_POSIX_MEMALIGN:
4996 {
4997 tree dest = gimple_call_arg (t, 0);
4998 unsigned i;
4999 ce_s *lhsp;
5000 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5001 lhs = get_function_part_constraint (fi, fi_clobbers);
5002 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5003 process_constraint (new_constraint (lhs, *lhsp));
5004 return;
5005 }
5006 /* The following functions clobber their second and third
5007 arguments. */
5008 case BUILT_IN_SINCOS:
5009 case BUILT_IN_SINCOSF:
5010 case BUILT_IN_SINCOSL:
5011 {
5012 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5013 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5014 return;
5015 }
5016 /* The following functions clobber their second argument. */
5017 case BUILT_IN_FREXP:
5018 case BUILT_IN_FREXPF:
5019 case BUILT_IN_FREXPL:
5020 case BUILT_IN_LGAMMA_R:
5021 case BUILT_IN_LGAMMAF_R:
5022 case BUILT_IN_LGAMMAL_R:
5023 case BUILT_IN_GAMMA_R:
5024 case BUILT_IN_GAMMAF_R:
5025 case BUILT_IN_GAMMAL_R:
5026 case BUILT_IN_MODF:
5027 case BUILT_IN_MODFF:
5028 case BUILT_IN_MODFL:
5029 {
5030 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5031 return;
5032 }
5033 /* The following functions clobber their third argument. */
5034 case BUILT_IN_REMQUO:
5035 case BUILT_IN_REMQUOF:
5036 case BUILT_IN_REMQUOL:
5037 {
5038 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5039 return;
5040 }
5041 /* The following functions neither read nor clobber memory. */
5042 case BUILT_IN_ASSUME_ALIGNED:
5043 case BUILT_IN_FREE:
5044 return;
5045 /* Trampolines are of no interest to us. */
5046 case BUILT_IN_INIT_TRAMPOLINE:
5047 case BUILT_IN_ADJUST_TRAMPOLINE:
5048 return;
5049 case BUILT_IN_VA_START:
5050 case BUILT_IN_VA_END:
5051 return;
5052 /* printf-style functions may have hooks to set pointers to
5053 point to somewhere into the generated string. Leave them
5054 for a later exercise... */
5055 default:
5056 /* Fallthru to general call handling. */;
5057 }
5058
5059 /* Parameters passed by value are used. */
5060 lhs = get_function_part_constraint (fi, fi_uses);
5061 for (i = 0; i < gimple_call_num_args (t); i++)
5062 {
5063 struct constraint_expr *rhsp;
5064 tree arg = gimple_call_arg (t, i);
5065
5066 if (TREE_CODE (arg) == SSA_NAME
5067 || is_gimple_min_invariant (arg))
5068 continue;
5069
5070 get_constraint_for_address_of (arg, &rhsc);
5071 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5072 process_constraint (new_constraint (lhs, *rhsp));
5073 rhsc.truncate (0);
5074 }
5075
5076 /* Build constraints for propagating clobbers/uses along the
5077 callgraph edges. */
5078 cfi = get_fi_for_callee (call_stmt);
5079 if (cfi->id == anything_id)
5080 {
5081 if (gimple_vdef (t))
5082 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5083 anything_id);
5084 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5085 anything_id);
5086 return;
5087 }
5088
5089 /* For callees without function info (that's external functions),
5090 ESCAPED is clobbered and used. */
5091 if (gimple_call_fndecl (t)
5092 && !cfi->is_fn_info)
5093 {
5094 varinfo_t vi;
5095
5096 if (gimple_vdef (t))
5097 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5098 escaped_id);
5099 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5100
5101 /* Also honor the call statement use/clobber info. */
5102 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5103 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5104 vi->id);
5105 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5106 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5107 vi->id);
5108 return;
5109 }
5110
5111 /* Otherwise the caller clobbers and uses what the callee does.
5112 ??? This should use a new complex constraint that filters
5113 local variables of the callee. */
5114 if (gimple_vdef (t))
5115 {
5116 lhs = get_function_part_constraint (fi, fi_clobbers);
5117 rhs = get_function_part_constraint (cfi, fi_clobbers);
5118 process_constraint (new_constraint (lhs, rhs));
5119 }
5120 lhs = get_function_part_constraint (fi, fi_uses);
5121 rhs = get_function_part_constraint (cfi, fi_uses);
5122 process_constraint (new_constraint (lhs, rhs));
5123 }
5124 else if (gimple_code (t) == GIMPLE_ASM)
5125 {
5126 /* ??? Ick. We can do better. */
5127 if (gimple_vdef (t))
5128 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5129 anything_id);
5130 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5131 anything_id);
5132 }
5133 }
5134
5135
5136 /* Find the first varinfo in the same variable as START that overlaps with
5137 OFFSET. Return NULL if we can't find one. */
5138
5139 static varinfo_t
5140 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5141 {
5142 /* If the offset is outside of the variable, bail out. */
5143 if (offset >= start->fullsize)
5144 return NULL;
5145
5146 /* If we cannot reach offset from start, look up the first field
5147 and start from there. */
5148 if (start->offset > offset)
5149 start = get_varinfo (start->head);
5150
5151 while (start)
5152 {
5153 /* We may not find a variable in the field list with the actual
5154 offset when we have glommed a structure to a variable.
5155 In that case, however, offset should still be within the size
5156 of the variable. */
5157 if (offset >= start->offset
5158 && (offset - start->offset) < start->size)
5159 return start;
5160
5161 start = vi_next (start);
5162 }
5163
5164 return NULL;
5165 }
5166
5167 /* Find the first varinfo in the same variable as START that overlaps with
5168 OFFSET. If there is no such varinfo the varinfo directly preceding
5169 OFFSET is returned. */
5170
5171 static varinfo_t
5172 first_or_preceding_vi_for_offset (varinfo_t start,
5173 unsigned HOST_WIDE_INT offset)
5174 {
5175 /* If we cannot reach offset from start, look up the first field
5176 and start from there. */
5177 if (start->offset > offset)
5178 start = get_varinfo (start->head);
5179
5180 /* We may not find a variable in the field list with the actual
5181 offset when we have glommed a structure to a variable.
5182 In that case, however, offset should still be within the size
5183 of the variable.
5184 If we get beyond the offset we look for, return the field
5185 directly preceding it, which may be the last field. */
5186 while (start->next
5187 && offset >= start->offset
5188 && !((offset - start->offset) < start->size))
5189 start = vi_next (start);
5190
5191 return start;
5192 }
5193
5194
5195 /* This structure is used during pushing fields onto the fieldstack
5196 to track the offset of the field, since bitpos_of_field gives it
5197 relative to its immediate containing type, and we want it relative
5198 to the ultimate containing object. */
5199
5200 struct fieldoff
5201 {
5202 /* Offset from the base of the outermost containing object to this field. */
5203 HOST_WIDE_INT offset;
5204
5205 /* Size, in bits, of the field. */
5206 unsigned HOST_WIDE_INT size;
5207
5208 unsigned has_unknown_size : 1;
5209
5210 unsigned must_have_pointers : 1;
5211
5212 unsigned may_have_pointers : 1;
5213
5214 unsigned only_restrict_pointers : 1;
5215 };
5216 typedef struct fieldoff fieldoff_s;
5217
5218
5219 /* qsort comparison function for two fieldoff's PA and PB. */
5220
5221 static int
5222 fieldoff_compare (const void *pa, const void *pb)
5223 {
5224 const fieldoff_s *foa = (const fieldoff_s *)pa;
5225 const fieldoff_s *fob = (const fieldoff_s *)pb;
5226 unsigned HOST_WIDE_INT foasize, fobsize;
5227
5228 if (foa->offset < fob->offset)
5229 return -1;
5230 else if (foa->offset > fob->offset)
5231 return 1;
5232
5233 foasize = foa->size;
5234 fobsize = fob->size;
5235 if (foasize < fobsize)
5236 return -1;
5237 else if (foasize > fobsize)
5238 return 1;
5239 return 0;
5240 }
5241
5242 /* Sort a fieldstack according to the field offset and sizes. */
5243 static void
5244 sort_fieldstack (vec<fieldoff_s> fieldstack)
5245 {
5246 fieldstack.qsort (fieldoff_compare);
5247 }
5248
5249 /* Return true if T is a type that can have subvars. */
5250
5251 static inline bool
5252 type_can_have_subvars (const_tree t)
5253 {
5254 /* Aggregates without overlapping fields can have subvars. */
5255 return TREE_CODE (t) == RECORD_TYPE;
5256 }
5257
5258 /* Return true if V is a tree that we can have subvars for.
5259 Normally, this is any aggregate type. Also complex
5260 types which are not gimple registers can have subvars. */
5261
5262 static inline bool
5263 var_can_have_subvars (const_tree v)
5264 {
5265 /* Volatile variables should never have subvars. */
5266 if (TREE_THIS_VOLATILE (v))
5267 return false;
5268
5269 /* Non decls or memory tags can never have subvars. */
5270 if (!DECL_P (v))
5271 return false;
5272
5273 return type_can_have_subvars (TREE_TYPE (v));
5274 }
5275
5276 /* Return true if T is a type that must contain pointers. */
5277
5278 static bool
5279 type_must_have_pointers (tree type)
5280 {
5281 if (POINTER_TYPE_P (type))
5282 return true;
5283
5284 if (TREE_CODE (type) == ARRAY_TYPE)
5285 return type_must_have_pointers (TREE_TYPE (type));
5286
5287 /* A function or method can have pointers as arguments, so track
5288 those separately. */
5289 if (TREE_CODE (type) == FUNCTION_TYPE
5290 || TREE_CODE (type) == METHOD_TYPE)
5291 return true;
5292
5293 return false;
5294 }
5295
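/* Return true if the type of T must contain pointers. */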
5296 static bool
5297 field_must_have_pointers (tree t)
5298 {
5299 return type_must_have_pointers (TREE_TYPE (t));
5300 }
5301
5302 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5303 the fields of TYPE onto fieldstack, recording their offsets along
5304 the way.
5305
5306 OFFSET is used to keep track of the offset in this entire
5307 structure, rather than just the immediately containing structure.
5308 Returns false if the caller is supposed to handle the field we
5309 recursed for. */
5310
5311 static bool
5312 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5313 HOST_WIDE_INT offset)
5314 {
5315 tree field;
5316 bool empty_p = true;
5317
5318 if (TREE_CODE (type) != RECORD_TYPE)
5319 return false;
5320
5321 /* If the vector of fields is growing too big, bail out early.
5322 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
5323 make sure this fails. */
5324 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5325 return false;
5326
5327 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5328 if (TREE_CODE (field) == FIELD_DECL)
5329 {
5330 bool push = false;
5331 HOST_WIDE_INT foff = bitpos_of_field (field);
5332
5333 if (!var_can_have_subvars (field)
5334 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5335 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5336 push = true;
5337 else if (!push_fields_onto_fieldstack
5338 (TREE_TYPE (field), fieldstack, offset + foff)
5339 && (DECL_SIZE (field)
5340 && !integer_zerop (DECL_SIZE (field))))
5341 /* Empty structures may have actual size, like in C++. So
5342 if we didn't push any subfields and the size is
5343 nonzero, push the field itself onto the stack. */
5344 push = true;
5345
5346 if (push)
5347 {
5348 fieldoff_s *pair = NULL;
5349 bool has_unknown_size = false;
5350 bool must_have_pointers_p;
5351
5352 if (!fieldstack->is_empty ())
5353 pair = &fieldstack->last ();
5354
5355 /* If there isn't anything at offset zero, create a pad covering everything up to this field. */
5356 if (!pair
5357 && offset + foff != 0)
5358 {
5359 fieldoff_s e = {0, offset + foff, false, false, false, false};
5360 pair = fieldstack->safe_push (e);
5361 }
5362
5363 if (!DECL_SIZE (field)
5364 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5365 has_unknown_size = true;
5366
5367 /* If adjacent fields do not contain pointers merge them. */
5368 must_have_pointers_p = field_must_have_pointers (field);
5369 if (pair
5370 && !has_unknown_size
5371 && !must_have_pointers_p
5372 && !pair->must_have_pointers
5373 && !pair->has_unknown_size
5374 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5375 {
5376 pair->size += tree_to_uhwi (DECL_SIZE (field));
5377 }
5378 else
5379 {
5380 fieldoff_s e;
5381 e.offset = offset + foff;
5382 e.has_unknown_size = has_unknown_size;
5383 if (!has_unknown_size)
5384 e.size = tree_to_uhwi (DECL_SIZE (field));
5385 else
5386 e.size = -1;
5387 e.must_have_pointers = must_have_pointers_p;
5388 e.may_have_pointers = true;
5389 e.only_restrict_pointers
5390 = (!has_unknown_size
5391 && POINTER_TYPE_P (TREE_TYPE (field))
5392 && TYPE_RESTRICT (TREE_TYPE (field)));
5393 fieldstack->safe_push (e);
5394 }
5395 }
5396
5397 empty_p = false;
5398 }
5399
5400 return !empty_p;
5401 }
5402
5403 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5404 if it is a varargs function. */
5405
5406 static unsigned int
5407 count_num_arguments (tree decl, bool *is_varargs)
5408 {
5409 unsigned int num = 0;
5410 tree t;
5411
5412 /* Capture named arguments for K&R functions. They do not
5413 have a prototype and thus no TYPE_ARG_TYPES. */
5414 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5415 ++num;
5416
5417 /* Check if the function has variadic arguments; the TYPE_ARG_TYPES list then lacks the terminating void_type_node. */
5418 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5419 if (TREE_VALUE (t) == void_type_node)
5420 break;
5421 if (!t)
5422 *is_varargs = true;
5423
5424 return num;
5425 }
5426
5427 /* Create function info for DECL, using NAME, and return the
5428 variable info we've created for the function. */
5429
5430 static varinfo_t
5431 create_function_info_for (tree decl, const char *name)
5432 {
5433 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5434 varinfo_t vi, prev_vi;
5435 tree arg;
5436 unsigned int i;
5437 bool is_varargs = false;
5438 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5439
5440 /* Create the variable info. */
5441
5442 vi = new_var_info (decl, name);
5443 vi->offset = 0;
5444 vi->size = 1;
5445 vi->fullsize = fi_parm_base + num_args;
5446 vi->is_fn_info = 1;
5447 vi->may_have_pointers = false;
5448 if (is_varargs)
5449 vi->fullsize = ~0;
5450 insert_vi_for_tree (vi->decl, vi);
5451
5452 prev_vi = vi;
5453
5454 /* Create a variable for things the function clobbers and one for
5455 things the function uses. */
5456 {
5457 varinfo_t clobbervi, usevi;
5458 const char *newname;
5459 char *tempname;
5460
5461 tempname = xasprintf ("%s.clobber", name);
5462 newname = ggc_strdup (tempname);
5463 free (tempname);
5464
5465 clobbervi = new_var_info (NULL, newname);
5466 clobbervi->offset = fi_clobbers;
5467 clobbervi->size = 1;
5468 clobbervi->fullsize = vi->fullsize;
5469 clobbervi->is_full_var = true;
5470 clobbervi->is_global_var = false;
5471 gcc_assert (prev_vi->offset < clobbervi->offset);
5472 prev_vi->next = clobbervi->id;
5473 prev_vi = clobbervi;
5474
5475 tempname = xasprintf ("%s.use", name);
5476 newname = ggc_strdup (tempname);
5477 free (tempname);
5478
5479 usevi = new_var_info (NULL, newname);
5480 usevi->offset = fi_uses;
5481 usevi->size = 1;
5482 usevi->fullsize = vi->fullsize;
5483 usevi->is_full_var = true;
5484 usevi->is_global_var = false;
5485 gcc_assert (prev_vi->offset < usevi->offset);
5486 prev_vi->next = usevi->id;
5487 prev_vi = usevi;
5488 }
5489
5490 /* And one for the static chain. */
5491 if (fn->static_chain_decl != NULL_TREE)
5492 {
5493 varinfo_t chainvi;
5494 const char *newname;
5495 char *tempname;
5496
5497 tempname = xasprintf ("%s.chain", name);
5498 newname = ggc_strdup (tempname);
5499 free (tempname);
5500
5501 chainvi = new_var_info (fn->static_chain_decl, newname);
5502 chainvi->offset = fi_static_chain;
5503 chainvi->size = 1;
5504 chainvi->fullsize = vi->fullsize;
5505 chainvi->is_full_var = true;
5506 chainvi->is_global_var = false;
5507 gcc_assert (prev_vi->offset < chainvi->offset);
5508 prev_vi->next = chainvi->id;
5509 prev_vi = chainvi;
5510 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5511 }
5512
5513 /* Create a variable for the return var. */
5514 if (DECL_RESULT (decl) != NULL
5515 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5516 {
5517 varinfo_t resultvi;
5518 const char *newname;
5519 char *tempname;
5520 tree resultdecl = decl;
5521
5522 if (DECL_RESULT (decl))
5523 resultdecl = DECL_RESULT (decl);
5524
5525 tempname = xasprintf ("%s.result", name);
5526 newname = ggc_strdup (tempname);
5527 free (tempname);
5528
5529 resultvi = new_var_info (resultdecl, newname);
5530 resultvi->offset = fi_result;
5531 resultvi->size = 1;
5532 resultvi->fullsize = vi->fullsize;
5533 resultvi->is_full_var = true;
5534 if (DECL_RESULT (decl))
5535 resultvi->may_have_pointers = true;
5536 gcc_assert (prev_vi->offset < resultvi->offset);
5537 prev_vi->next = resultvi->id;
5538 prev_vi = resultvi;
5539 if (DECL_RESULT (decl))
5540 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5541 }
5542
5543 /* Set up variables for each argument. */
5544 arg = DECL_ARGUMENTS (decl);
5545 for (i = 0; i < num_args; i++)
5546 {
5547 varinfo_t argvi;
5548 const char *newname;
5549 char *tempname;
5550 tree argdecl = decl;
5551
5552 if (arg)
5553 argdecl = arg;
5554
5555 tempname = xasprintf ("%s.arg%d", name, i);
5556 newname = ggc_strdup (tempname);
5557 free (tempname);
5558
5559 argvi = new_var_info (argdecl, newname);
5560 argvi->offset = fi_parm_base + i;
5561 argvi->size = 1;
5562 argvi->is_full_var = true;
5563 argvi->fullsize = vi->fullsize;
5564 if (arg)
5565 argvi->may_have_pointers = true;
5566 gcc_assert (prev_vi->offset < argvi->offset);
5567 prev_vi->next = argvi->id;
5568 prev_vi = argvi;
5569 if (arg)
5570 {
5571 insert_vi_for_tree (arg, argvi);
5572 arg = DECL_CHAIN (arg);
5573 }
5574 }
5575
5576 /* Add one representative for all further args. */
5577 if (is_varargs)
5578 {
5579 varinfo_t argvi;
5580 const char *newname;
5581 char *tempname;
5582 tree decl;
5583
5584 tempname = xasprintf ("%s.varargs", name);
5585 newname = ggc_strdup (tempname);
5586 free (tempname);
5587
5588 /* We need something that can be pointed to for va_start. */
5589 decl = build_fake_var_decl (ptr_type_node);
5590
5591 argvi = new_var_info (decl, newname);
5592 argvi->offset = fi_parm_base + num_args;
5593 argvi->size = ~0;
5594 argvi->is_full_var = true;
5595 argvi->is_heap_var = true;
5596 argvi->fullsize = vi->fullsize;
5597 gcc_assert (prev_vi->offset < argvi->offset);
5598 prev_vi->next = argvi->id;
5599 prev_vi = argvi;
5600 }
5601
5602 return vi;
5603 }
5604
5605
5606 /* Return true if FIELDSTACK contains fields that overlap.
5607 FIELDSTACK is assumed to be sorted by offset. */
5608
5609 static bool
5610 check_for_overlaps (vec<fieldoff_s> fieldstack)
5611 {
5612 fieldoff_s *fo = NULL;
5613 unsigned int i;
5614 HOST_WIDE_INT lastoffset = -1;
5615
5616 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5617 {
5618 if (fo->offset == lastoffset)
5619 return true;
5620 lastoffset = fo->offset;
5621 }
5622 return false;
5623 }
5624
5625 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5626 This will also create any varinfo structures necessary for fields
5627 of DECL. */
5628
5629 static varinfo_t
5630 create_variable_info_for_1 (tree decl, const char *name)
5631 {
5632 varinfo_t vi, newvi;
5633 tree decl_type = TREE_TYPE (decl);
5634 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5635 auto_vec<fieldoff_s> fieldstack;
5636 fieldoff_s *fo;
5637 unsigned int i;
5638 varpool_node *vnode;
5639
5640 if (!declsize
5641 || !tree_fits_uhwi_p (declsize))
5642 {
5643 vi = new_var_info (decl, name);
5644 vi->offset = 0;
5645 vi->size = ~0;
5646 vi->fullsize = ~0;
5647 vi->is_unknown_size_var = true;
5648 vi->is_full_var = true;
5649 vi->may_have_pointers = true;
5650 return vi;
5651 }
5652
5653 /* Collect field information. */
5654 if (use_field_sensitive
5655 && var_can_have_subvars (decl)
5656 /* ??? Force us to not use subfields for global initializers
5657 in IPA mode. Else we'd have to parse arbitrary initializers. */
5658 && !(in_ipa_mode
5659 && is_global_var (decl)
5660 && (vnode = varpool_node::get (decl))
5661 && vnode->get_constructor ()))
5662 {
5663 fieldoff_s *fo = NULL;
5664 bool notokay = false;
5665 unsigned int i;
5666
5667 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5668
5669 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5670 if (fo->has_unknown_size
5671 || fo->offset < 0)
5672 {
5673 notokay = true;
5674 break;
5675 }
5676
5677 /* We can't sort them if we have a field with a variable-sized type,
5678 which will make notokay = true. In that case, we are going to return
5679 without creating varinfos for the fields anyway, so sorting them is a
5680 waste to boot. */
5681 if (!notokay)
5682 {
5683 sort_fieldstack (fieldstack);
5684 /* Due to some C++ FE issues, like PR 22488, we might end up
5685 with what appear to be overlapping fields even though they,
5686 in reality, do not overlap. Until the C++ FE is fixed,
5687 we will simply disable field-sensitivity for these cases. */
5688 notokay = check_for_overlaps (fieldstack);
5689 }
5690
5691 if (notokay)
5692 fieldstack.release ();
5693 }
5694
5695 /* If we didn't end up collecting sub-variables, create a full
5696 variable for the decl. */
5697 if (fieldstack.length () <= 1
5698 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5699 {
5700 vi = new_var_info (decl, name);
5701 vi->offset = 0;
5702 vi->may_have_pointers = true;
5703 vi->fullsize = tree_to_uhwi (declsize);
5704 vi->size = vi->fullsize;
5705 vi->is_full_var = true;
5706 fieldstack.release ();
5707 return vi;
5708 }
5709
5710 vi = new_var_info (decl, name);
5711 vi->fullsize = tree_to_uhwi (declsize);
5712 for (i = 0, newvi = vi;
5713 fieldstack.iterate (i, &fo);
5714 ++i, newvi = vi_next (newvi))
5715 {
5716 const char *newname = "NULL";
5717 char *tempname;
5718
5719 if (dump_file)
5720 {
5721 tempname
5722 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
5723 "+" HOST_WIDE_INT_PRINT_DEC, name,
5724 fo->offset, fo->size);
5725 newname = ggc_strdup (tempname);
5726 free (tempname);
5727 }
5728 newvi->name = newname;
5729 newvi->offset = fo->offset;
5730 newvi->size = fo->size;
5731 newvi->fullsize = vi->fullsize;
5732 newvi->may_have_pointers = fo->may_have_pointers;
5733 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5734 if (i + 1 < fieldstack.length ())
5735 {
5736 varinfo_t tem = new_var_info (decl, name);
5737 newvi->next = tem->id;
5738 tem->head = vi->id;
5739 }
5740 }
5741
5742 return vi;
5743 }
5744
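/* As a sketch of the splitting done above, assume an LP64 target and
   the (hypothetical) declaration

     struct { int *p; long l; int *q; } s;

   The fieldstack then holds three entries and S is represented by a
   chain of three varinfos with (offset, size) pairs (0, 64), (64, 64)
   and (128, 64), of which the first and last have may_have_pointers
   set; the dump names s.0+64 etc. are only created when dumping.  */
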
5745 static unsigned int
5746 create_variable_info_for (tree decl, const char *name)
5747 {
5748 varinfo_t vi = create_variable_info_for_1 (decl, name);
5749 unsigned int id = vi->id;
5750
5751 insert_vi_for_tree (decl, vi);
5752
5753 if (TREE_CODE (decl) != VAR_DECL)
5754 return id;
5755
5756 /* Create initial constraints for globals. */
5757 for (; vi; vi = vi_next (vi))
5758 {
5759 if (!vi->may_have_pointers
5760 || !vi->is_global_var)
5761 continue;
5762
5763 /* Mark global restrict qualified pointers. */
5764 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5765 && TYPE_RESTRICT (TREE_TYPE (decl)))
5766 || vi->only_restrict_pointers)
5767 {
5768 varinfo_t rvi
5769 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5770 /* ??? For now exclude reads from globals as restrict sources
5771 if those are not (indirectly) from incoming parameters. */
5772 rvi->is_restrict_var = false;
5773 continue;
5774 }
5775
5776 /* In non-IPA mode the initializer from nonlocal is all we need. */
5777 if (!in_ipa_mode
5778 || DECL_HARD_REGISTER (decl))
5779 make_copy_constraint (vi, nonlocal_id);
5780
5781 /* In IPA mode parse the initializer and generate proper constraints
5782 for it. */
5783 else
5784 {
5785 varpool_node *vnode = varpool_node::get (decl);
5786
5787 /* For escaped variables initialize them from nonlocal. */
5788 if (!vnode->all_refs_explicit_p ())
5789 make_copy_constraint (vi, nonlocal_id);
5790
5791 /* If this is a global variable with an initializer and we are in
5792 IPA mode generate constraints for it. */
5793 if (vnode->get_constructor ()
5794 && vnode->definition)
5795 {
5796 auto_vec<ce_s> rhsc;
5797 struct constraint_expr lhs, *rhsp;
5798 unsigned i;
5799 get_constraint_for_rhs (vnode->get_constructor (), &rhsc);
5800 lhs.var = vi->id;
5801 lhs.offset = 0;
5802 lhs.type = SCALAR;
5803 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5804 process_constraint (new_constraint (lhs, *rhsp));
5805 /* If this is a variable that escapes from the unit
5806 the initializer escapes as well. */
5807 if (!vnode->all_refs_explicit_p ())
5808 {
5809 lhs.var = escaped_id;
5810 lhs.offset = 0;
5811 lhs.type = SCALAR;
5812 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5813 process_constraint (new_constraint (lhs, *rhsp));
5814 }
5815 }
5816 }
5817 }
5818
5819 return id;
5820 }
5821
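/* For example (hypothetical input), given the global definitions

     int x;
     int *p = &x;

   non-IPA mode simply adds p = NONLOCAL, while IPA mode parses the
   initializer into p = &x and, if not all references to p are
   explicit in the unit, additionally ESCAPED = &x.  */
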
5822 /* Print out the points-to solution for VAR to FILE. */
5823
5824 static void
5825 dump_solution_for_var (FILE *file, unsigned int var)
5826 {
5827 varinfo_t vi = get_varinfo (var);
5828 unsigned int i;
5829 bitmap_iterator bi;
5830
5831 /* Dump the solution for unified vars anyway; this avoids difficulties
5832 when scanning dumps in the testsuite. */
5833 fprintf (file, "%s = { ", vi->name);
5834 vi = get_varinfo (find (var));
5835 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5836 fprintf (file, "%s ", get_varinfo (i)->name);
5837 fprintf (file, "}");
5838
5839 /* But note when the variable was unified. */
5840 if (vi->id != var)
5841 fprintf (file, " same as %s", vi->name);
5842
5843 fprintf (file, "\n");
5844 }
5845
5846 /* Print the points-to solution for VAR to stderr. */
5847
5848 DEBUG_FUNCTION void
5849 debug_solution_for_var (unsigned int var)
5850 {
5851 dump_solution_for_var (stderr, var);
5852 }
5853
5854 /* Create varinfo structures for all of the variables in the
5855 function for intraprocedural mode. */
5856
5857 static void
5858 intra_create_variable_infos (struct function *fn)
5859 {
5860 tree t;
5861
5862 /* For each incoming pointer argument ARG, create the constraint ARG
5863 = NONLOCAL, or create a dummy pointed-to variable if ARG is a
5864 restrict qualified passed-by-reference argument. */
5865 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
5866 {
5867 varinfo_t p = get_vi_for_tree (t);
5868
5869 /* For restrict qualified pointers to objects passed by
5870 reference build a real representative for the pointed-to object.
5871 Treat restrict qualified references the same. */
5872 if (TYPE_RESTRICT (TREE_TYPE (t))
5873 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5874 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5875 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5876 {
5877 struct constraint_expr lhsc, rhsc;
5878 varinfo_t vi;
5879 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5880 DECL_EXTERNAL (heapvar) = 1;
5881 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5882 vi->is_restrict_var = 1;
5883 insert_vi_for_tree (heapvar, vi);
5884 lhsc.var = p->id;
5885 lhsc.type = SCALAR;
5886 lhsc.offset = 0;
5887 rhsc.var = vi->id;
5888 rhsc.type = ADDRESSOF;
5889 rhsc.offset = 0;
5890 process_constraint (new_constraint (lhsc, rhsc));
5891 for (; vi; vi = vi_next (vi))
5892 if (vi->may_have_pointers)
5893 {
5894 if (vi->only_restrict_pointers)
5895 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5896 else
5897 make_copy_constraint (vi, nonlocal_id);
5898 }
5899 continue;
5900 }
5901
5902 if (POINTER_TYPE_P (TREE_TYPE (t))
5903 && TYPE_RESTRICT (TREE_TYPE (t)))
5904 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5905 else
5906 {
5907 for (; p; p = vi_next (p))
5908 {
5909 if (p->only_restrict_pointers)
5910 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5911 else if (p->may_have_pointers)
5912 make_constraint_from (p, nonlocal_id);
5913 }
5914 }
5915 }
5916
5917 /* Add a constraint for a result decl that is passed by reference. */
5918 if (DECL_RESULT (fn->decl)
5919 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
5920 {
5921 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
5922
5923 for (p = result_vi; p; p = vi_next (p))
5924 make_constraint_from (p, nonlocal_id);
5925 }
5926
5927 /* Add a constraint for the incoming static chain parameter. */
5928 if (fn->static_chain_decl != NULL_TREE)
5929 {
5930 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
5931
5932 for (p = chain_vi; p; p = vi_next (p))
5933 make_constraint_from (p, nonlocal_id);
5934 }
5935 }
5936
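/* As a small worked example (hypothetical signature), for

     void f (int *p, int * restrict q);

   the code above generates p = NONLOCAL for the ordinary pointer,
   while q is initialized from a PARM_RESTRICT tag, which later allows
   accesses through q to be disambiguated from other memory.  */
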
5937 /* Structure used to put solution bitmaps in a hashtable so they can
5938 be shared among variables with the same points-to set. */
5939
5940 typedef struct shared_bitmap_info
5941 {
5942 bitmap pt_vars;
5943 hashval_t hashcode;
5944 } *shared_bitmap_info_t;
5945 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5946
5947 /* Shared_bitmap hashtable helpers. */
5948
5949 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
5950 {
5951 static inline hashval_t hash (const shared_bitmap_info *);
5952 static inline bool equal (const shared_bitmap_info *,
5953 const shared_bitmap_info *);
5954 };
5955
5956 /* Hash function for a shared_bitmap_info_t. */
5957
5958 inline hashval_t
5959 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
5960 {
5961 return bi->hashcode;
5962 }
5963
5964 /* Equality function for two shared_bitmap_info_t's. */
5965
5966 inline bool
5967 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
5968 const shared_bitmap_info *sbi2)
5969 {
5970 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5971 }
5972
5973 /* Shared_bitmap hashtable. */
5974
5975 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
5976
5977 /* Look up a bitmap in the shared bitmap hashtable, and return the
5978 already existing instance if there is one, NULL otherwise. */
5979
5980 static bitmap
5981 shared_bitmap_lookup (bitmap pt_vars)
5982 {
5983 shared_bitmap_info **slot;
5984 struct shared_bitmap_info sbi;
5985
5986 sbi.pt_vars = pt_vars;
5987 sbi.hashcode = bitmap_hash (pt_vars);
5988
5989 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
5990 if (!slot)
5991 return NULL;
5992 else
5993 return (*slot)->pt_vars;
5994 }
5995
5996
5997 /* Add a bitmap to the shared bitmap hashtable. */
5998
5999 static void
6000 shared_bitmap_add (bitmap pt_vars)
6001 {
6002 shared_bitmap_info **slot;
6003 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6004
6005 sbi->pt_vars = pt_vars;
6006 sbi->hashcode = bitmap_hash (pt_vars);
6007
6008 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6009 gcc_assert (!*slot);
6010 *slot = sbi;
6011 }
6012
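/* These two functions implement hash-consing of points-to bitmaps.
   A minimal sketch of the intended calling pattern (hypothetical
   caller; the real one is find_what_var_points_to below):

     bitmap b = ...;				(freshly computed set)
     bitmap shared = shared_bitmap_lookup (b);
     if (!shared)
       shared_bitmap_add (b);			(B becomes canonical)
     else
       bitmap_clear (b);			(reuse SHARED, drop B)  */
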
6013
6014 /* Set bits in INTO corresponding to the variable uids in solution set FROM, updating the flags of PT as a side effect. */
6015
6016 static void
6017 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
6018 {
6019 unsigned int i;
6020 bitmap_iterator bi;
6021 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6022 bool everything_escaped
6023 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6024
6025 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6026 {
6027 varinfo_t vi = get_varinfo (i);
6028
6029 /* The only artificial variables that are allowed in a may-alias
6030 set are heap variables. */
6031 if (vi->is_artificial_var && !vi->is_heap_var)
6032 continue;
6033
6034 if (everything_escaped
6035 || (escaped_vi->solution
6036 && bitmap_bit_p (escaped_vi->solution, i)))
6037 {
6038 pt->vars_contains_escaped = true;
6039 pt->vars_contains_escaped_heap = vi->is_heap_var;
6040 }
6041
6042 if (TREE_CODE (vi->decl) == VAR_DECL
6043 || TREE_CODE (vi->decl) == PARM_DECL
6044 || TREE_CODE (vi->decl) == RESULT_DECL)
6045 {
6046 /* If we are in IPA mode we will not recompute points-to
6047 sets after inlining so make sure they stay valid. */
6048 if (in_ipa_mode
6049 && !DECL_PT_UID_SET_P (vi->decl))
6050 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6051
6052 /* Add the decl to the points-to set. Note that the points-to
6053 set contains global variables. */
6054 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6055 if (vi->is_global_var)
6056 pt->vars_contains_nonlocal = true;
6057 }
6058 }
6059 }
6060
6061
6062 /* Compute the points-to solution for the variable ORIG_VI and return it. */
6063
6064 static struct pt_solution
6065 find_what_var_points_to (varinfo_t orig_vi)
6066 {
6067 unsigned int i;
6068 bitmap_iterator bi;
6069 bitmap finished_solution;
6070 bitmap result;
6071 varinfo_t vi;
6072 struct pt_solution *pt;
6073
6074 /* This variable may have been collapsed; let's get the real
6075 variable. */
6076 vi = get_varinfo (find (orig_vi->id));
6077
6078 /* See if we have already computed the solution and return it. */
6079 pt_solution **slot = &final_solutions->get_or_insert (vi);
6080 if (*slot != NULL)
6081 return **slot;
6082
6083 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6084 memset (pt, 0, sizeof (struct pt_solution));
6085
6086 /* Translate artificial variables into SSA_NAME_PTR_INFO
6087 attributes. */
6088 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6089 {
6090 varinfo_t vi = get_varinfo (i);
6091
6092 if (vi->is_artificial_var)
6093 {
6094 if (vi->id == nothing_id)
6095 pt->null = 1;
6096 else if (vi->id == escaped_id)
6097 {
6098 if (in_ipa_mode)
6099 pt->ipa_escaped = 1;
6100 else
6101 pt->escaped = 1;
6102 /* Expand some special vars of ESCAPED in-place here. */
6103 varinfo_t evi = get_varinfo (find (escaped_id));
6104 if (bitmap_bit_p (evi->solution, nonlocal_id))
6105 pt->nonlocal = 1;
6106 }
6107 else if (vi->id == nonlocal_id)
6108 pt->nonlocal = 1;
6109 else if (vi->is_heap_var)
6110 /* We represent heapvars in the points-to set properly. */
6111 ;
6112 else if (vi->id == string_id)
6113 /* Nobody cares - STRING_CSTs are read-only entities. */
6114 ;
6115 else if (vi->id == anything_id
6116 || vi->id == integer_id)
6117 pt->anything = 1;
6118 }
6119 }
6120
6121 /* Instead of doing extra work, simply do not create
6122 elaborate points-to information for pt_anything pointers. */
6123 if (pt->anything)
6124 return *pt;
6125
6126 /* Share the final set of variables when possible. */
6127 finished_solution = BITMAP_GGC_ALLOC ();
6128 stats.points_to_sets_created++;
6129
6130 set_uids_in_ptset (finished_solution, vi->solution, pt);
6131 result = shared_bitmap_lookup (finished_solution);
6132 if (!result)
6133 {
6134 shared_bitmap_add (finished_solution);
6135 pt->vars = finished_solution;
6136 }
6137 else
6138 {
6139 pt->vars = result;
6140 bitmap_clear (finished_solution);
6141 }
6142
6143 return *pt;
6144 }
6145
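/* For instance, if the solution bitmap of VI contains the artificial
   NONLOCAL variable, a local X and a heap variable, the resulting
   pt_solution gets nonlocal = 1 from the artificial bit while X and
   the heap variable end up as bits in pt->vars via set_uids_in_ptset
   (purely illustrative; the exact members depend on the solve).  */
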
6146 /* Given a pointer variable P, fill in its points-to set. */
6147
6148 static void
6149 find_what_p_points_to (tree p)
6150 {
6151 struct ptr_info_def *pi;
6152 tree lookup_p = p;
6153 varinfo_t vi;
6154
6155 /* For parameters, get at the points-to set for the actual parm
6156 decl. */
6157 if (TREE_CODE (p) == SSA_NAME
6158 && SSA_NAME_IS_DEFAULT_DEF (p)
6159 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6160 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6161 lookup_p = SSA_NAME_VAR (p);
6162
6163 vi = lookup_vi_for_tree (lookup_p);
6164 if (!vi)
6165 return;
6166
6167 pi = get_ptr_info (p);
6168 pi->pt = find_what_var_points_to (vi);
6169 }
6170
6171
6172 /* Query statistics for points-to solutions. */
6173
6174 static struct {
6175 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6176 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6177 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6178 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6179 } pta_stats;
6180
6181 void
6182 dump_pta_stats (FILE *s)
6183 {
6184 fprintf (s, "\nPTA query stats:\n");
6185 fprintf (s, " pt_solution_includes: "
6186 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6187 HOST_WIDE_INT_PRINT_DEC" queries\n",
6188 pta_stats.pt_solution_includes_no_alias,
6189 pta_stats.pt_solution_includes_no_alias
6190 + pta_stats.pt_solution_includes_may_alias);
6191 fprintf (s, " pt_solutions_intersect: "
6192 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6193 HOST_WIDE_INT_PRINT_DEC" queries\n",
6194 pta_stats.pt_solutions_intersect_no_alias,
6195 pta_stats.pt_solutions_intersect_no_alias
6196 + pta_stats.pt_solutions_intersect_may_alias);
6197 }
6198
6199
6200 /* Reset the points-to solution *PT to a conservative default
6201 (point to anything). */
6202
6203 void
6204 pt_solution_reset (struct pt_solution *pt)
6205 {
6206 memset (pt, 0, sizeof (struct pt_solution));
6207 pt->anything = true;
6208 }
6209
6210 /* Set the points-to solution *PT to point only to the variables
6211 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6212 global variables; whether it contains escaped variables is
6213 derived from the current function's ESCAPED solution. */
6214
6215 void
6216 pt_solution_set (struct pt_solution *pt, bitmap vars,
6217 bool vars_contains_nonlocal)
6218 {
6219 memset (pt, 0, sizeof (struct pt_solution));
6220 pt->vars = vars;
6221 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6222 pt->vars_contains_escaped
6223 = (cfun->gimple_df->escaped.anything
6224 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6225 }
6226
6227 /* Set the points-to solution *PT to point only to the variable VAR. */
6228
6229 void
6230 pt_solution_set_var (struct pt_solution *pt, tree var)
6231 {
6232 memset (pt, 0, sizeof (struct pt_solution));
6233 pt->vars = BITMAP_GGC_ALLOC ();
6234 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6235 pt->vars_contains_nonlocal = is_global_var (var);
6236 pt->vars_contains_escaped
6237 = (cfun->gimple_df->escaped.anything
6238 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6239 }
6240
6241 /* Computes the union of the points-to solutions *DEST and *SRC and
6242 stores the result in *DEST. This changes the points-to bitmap
6243 of *DEST and thus must not be used if that bitmap might be shared.
6244 The points-to bitmaps of *SRC and *DEST will not be shared after
6245 this function if they were not before. */
6246
6247 static void
6248 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6249 {
6250 dest->anything |= src->anything;
6251 if (dest->anything)
6252 {
6253 pt_solution_reset (dest);
6254 return;
6255 }
6256
6257 dest->nonlocal |= src->nonlocal;
6258 dest->escaped |= src->escaped;
6259 dest->ipa_escaped |= src->ipa_escaped;
6260 dest->null |= src->null;
6261 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6262 dest->vars_contains_escaped |= src->vars_contains_escaped;
6263 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6264 if (!src->vars)
6265 return;
6266
6267 if (!dest->vars)
6268 dest->vars = BITMAP_GGC_ALLOC ();
6269 bitmap_ior_into (dest->vars, src->vars);
6270 }
6271
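/* E.g. unioning { nonlocal, vars = {x} } into { vars = {y} } yields
   { nonlocal, vars = {x, y} }, whereas unioning a pt_anything
   solution into anything else collapses the result to the
   conservative pt_anything state via pt_solution_reset.  */
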
6272 /* Return true if the points-to solution *PT is empty. */
6273
6274 bool
6275 pt_solution_empty_p (struct pt_solution *pt)
6276 {
6277 if (pt->anything
6278 || pt->nonlocal)
6279 return false;
6280
6281 if (pt->vars
6282 && !bitmap_empty_p (pt->vars))
6283 return false;
6284
6285 /* If the solution includes ESCAPED, check if that is empty. */
6286 if (pt->escaped
6287 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6288 return false;
6289
6290 /* Likewise for the IPA ESCAPED solution. */
6291 if (pt->ipa_escaped
6292 && !pt_solution_empty_p (&ipa_escaped_pt))
6293 return false;
6294
6295 return true;
6296 }
6297
6298 /* Return true if the points-to solution *PT points to exactly one
6299 variable, and store that variable's uid in *UID. */
6300
6301 bool
6302 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6303 {
6304 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6305 || pt->null || pt->vars == NULL
6306 || !bitmap_single_bit_set_p (pt->vars))
6307 return false;
6308
6309 *uid = bitmap_first_set_bit (pt->vars);
6310 return true;
6311 }
6312
6313 /* Return true if the points-to solution *PT includes global memory. */
6314
6315 bool
6316 pt_solution_includes_global (struct pt_solution *pt)
6317 {
6318 if (pt->anything
6319 || pt->nonlocal
6320 || pt->vars_contains_nonlocal
6321 /* The following is a hack to make the malloc escape hack work.
6322 In reality we'd need different sets for escaped-through-return
6323 and escaped-to-callees and passes would need to be updated. */
6324 || pt->vars_contains_escaped_heap)
6325 return true;
6326
6327 /* 'escaped' is also a placeholder so we have to look into it. */
6328 if (pt->escaped)
6329 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6330
6331 if (pt->ipa_escaped)
6332 return pt_solution_includes_global (&ipa_escaped_pt);
6333
6334 /* ??? This predicate is not correct for the IPA-PTA solution
6335 as we do not properly distinguish between unit escape points
6336 and global variables. */
6337 if (cfun->gimple_df->ipa_pta)
6338 return true;
6339
6340 return false;
6341 }
6342
6343 /* Return true if the points-to solution *PT includes the variable
6344 declaration DECL. */
6345
6346 static bool
6347 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6348 {
6349 if (pt->anything)
6350 return true;
6351
6352 if (pt->nonlocal
6353 && is_global_var (decl))
6354 return true;
6355
6356 if (pt->vars
6357 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6358 return true;
6359
6360 /* If the solution includes ESCAPED, check it. */
6361 if (pt->escaped
6362 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6363 return true;
6364
6365 /* Likewise for the IPA ESCAPED solution. */
6366 if (pt->ipa_escaped
6367 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6368 return true;
6369
6370 return false;
6371 }
6372
6373 bool
6374 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6375 {
6376 bool res = pt_solution_includes_1 (pt, decl);
6377 if (res)
6378 ++pta_stats.pt_solution_includes_may_alias;
6379 else
6380 ++pta_stats.pt_solution_includes_no_alias;
6381 return res;
6382 }
6383
6384 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6385 intersection. */
6386
6387 static bool
6388 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6389 {
6390 if (pt1->anything || pt2->anything)
6391 return true;
6392
6393 /* If either points to unknown global memory and the other points to
6394 any global memory they alias. */
6395 if ((pt1->nonlocal
6396 && (pt2->nonlocal
6397 || pt2->vars_contains_nonlocal))
6398 || (pt2->nonlocal
6399 && pt1->vars_contains_nonlocal))
6400 return true;
6401
6402 /* If either points to all escaped memory and the other points to
6403 any escaped memory they alias. */
6404 if ((pt1->escaped
6405 && (pt2->escaped
6406 || pt2->vars_contains_escaped))
6407 || (pt2->escaped
6408 && pt1->vars_contains_escaped))
6409 return true;
6410
6411 /* Check the escaped solution if required.
6412 ??? Do we need to check the local against the IPA escaped sets? */
6413 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6414 && !pt_solution_empty_p (&ipa_escaped_pt))
6415 {
6416 /* If both point to escaped memory and that solution
6417 is not empty they alias. */
6418 if (pt1->ipa_escaped && pt2->ipa_escaped)
6419 return true;
6420
6421 /* If either points to escaped memory see if the escaped solution
6422 intersects with the other. */
6423 if ((pt1->ipa_escaped
6424 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6425 || (pt2->ipa_escaped
6426 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6427 return true;
6428 }
6429
6430 /* Now both pointers alias if their points-to solution intersects. */
6431 return (pt1->vars
6432 && pt2->vars
6433 && bitmap_intersect_p (pt1->vars, pt2->vars));
6434 }
6435
6436 bool
6437 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6438 {
6439 bool res = pt_solutions_intersect_1 (pt1, pt2);
6440 if (res)
6441 ++pta_stats.pt_solutions_intersect_may_alias;
6442 else
6443 ++pta_stats.pt_solutions_intersect_no_alias;
6444 return res;
6445 }
6446
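/* As an illustration, the solutions { vars = {x} } and { vars = {y} }
   do not intersect, so accesses based on the two pointers can be
   disambiguated, while a solution with nonlocal set intersects with
   every solution whose vars contain a global variable.  */
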
6447
6448 /* Dump points-to information to OUTFILE. */
6449
6450 static void
6451 dump_sa_points_to_info (FILE *outfile)
6452 {
6453 unsigned int i;
6454
6455 fprintf (outfile, "\nPoints-to sets\n\n");
6456
6457 if (dump_flags & TDF_STATS)
6458 {
6459 fprintf (outfile, "Stats:\n");
6460 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6461 fprintf (outfile, "Non-pointer vars: %d\n",
6462 stats.nonpointer_vars);
6463 fprintf (outfile, "Statically unified vars: %d\n",
6464 stats.unified_vars_static);
6465 fprintf (outfile, "Dynamically unified vars: %d\n",
6466 stats.unified_vars_dynamic);
6467 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6468 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6469 fprintf (outfile, "Number of implicit edges: %d\n",
6470 stats.num_implicit_edges);
6471 }
6472
6473 for (i = 1; i < varmap.length (); i++)
6474 {
6475 varinfo_t vi = get_varinfo (i);
6476 if (!vi->may_have_pointers)
6477 continue;
6478 dump_solution_for_var (outfile, i);
6479 }
6480 }
6481
6482
6483 /* Debug points-to information to stderr. */
6484
6485 DEBUG_FUNCTION void
6486 debug_sa_points_to_info (void)
6487 {
6488 dump_sa_points_to_info (stderr);
6489 }
6490
6491
6492 /* Initialize the always-existing constraint variables NULL, ANYTHING,
6493 STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
6494
6495 static void
6496 init_base_vars (void)
6497 {
6498 struct constraint_expr lhs, rhs;
6499 varinfo_t var_anything;
6500 varinfo_t var_nothing;
6501 varinfo_t var_string;
6502 varinfo_t var_escaped;
6503 varinfo_t var_nonlocal;
6504 varinfo_t var_storedanything;
6505 varinfo_t var_integer;
6506
6507 /* Variable ID zero is reserved and should be NULL. */
6508 varmap.safe_push (NULL);
6509
6510 /* Create the NULL variable, used to represent that a variable points
6511 to NULL. */
6512 var_nothing = new_var_info (NULL_TREE, "NULL");
6513 gcc_assert (var_nothing->id == nothing_id);
6514 var_nothing->is_artificial_var = 1;
6515 var_nothing->offset = 0;
6516 var_nothing->size = ~0;
6517 var_nothing->fullsize = ~0;
6518 var_nothing->is_special_var = 1;
6519 var_nothing->may_have_pointers = 0;
6520 var_nothing->is_global_var = 0;
6521
6522 /* Create the ANYTHING variable, used to represent that a variable
6523 points to some unknown piece of memory. */
6524 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6525 gcc_assert (var_anything->id == anything_id);
6526 var_anything->is_artificial_var = 1;
6527 var_anything->size = ~0;
6528 var_anything->offset = 0;
6529 var_anything->fullsize = ~0;
6530 var_anything->is_special_var = 1;
6531
6532 /* Anything points to anything. This makes deref constraints just
6533 work in the presence of linked list and other p = *p type loops,
6534 by saying that *ANYTHING = ANYTHING. */
6535 lhs.type = SCALAR;
6536 lhs.var = anything_id;
6537 lhs.offset = 0;
6538 rhs.type = ADDRESSOF;
6539 rhs.var = anything_id;
6540 rhs.offset = 0;
6541
6542 /* This specifically does not use process_constraint because
6543 process_constraint ignores all anything = anything constraints, since all
6544 but this one are redundant. */
6545 constraints.safe_push (new_constraint (lhs, rhs));
6546
6547 /* Create the STRING variable, used to represent that a variable
6548 points to a string literal. String literals don't contain
6549 pointers so STRING doesn't point to anything. */
6550 var_string = new_var_info (NULL_TREE, "STRING");
6551 gcc_assert (var_string->id == string_id);
6552 var_string->is_artificial_var = 1;
6553 var_string->offset = 0;
6554 var_string->size = ~0;
6555 var_string->fullsize = ~0;
6556 var_string->is_special_var = 1;
6557 var_string->may_have_pointers = 0;
6558
6559 /* Create the ESCAPED variable, used to represent the set of escaped
6560 memory. */
6561 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6562 gcc_assert (var_escaped->id == escaped_id);
6563 var_escaped->is_artificial_var = 1;
6564 var_escaped->offset = 0;
6565 var_escaped->size = ~0;
6566 var_escaped->fullsize = ~0;
6567 var_escaped->is_special_var = 0;
6568
6569 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6570 memory. */
6571 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6572 gcc_assert (var_nonlocal->id == nonlocal_id);
6573 var_nonlocal->is_artificial_var = 1;
6574 var_nonlocal->offset = 0;
6575 var_nonlocal->size = ~0;
6576 var_nonlocal->fullsize = ~0;
6577 var_nonlocal->is_special_var = 1;
6578
6579 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6580 lhs.type = SCALAR;
6581 lhs.var = escaped_id;
6582 lhs.offset = 0;
6583 rhs.type = DEREF;
6584 rhs.var = escaped_id;
6585 rhs.offset = 0;
6586 process_constraint (new_constraint (lhs, rhs));
6587
6588 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6589 whole variable escapes. */
6590 lhs.type = SCALAR;
6591 lhs.var = escaped_id;
6592 lhs.offset = 0;
6593 rhs.type = SCALAR;
6594 rhs.var = escaped_id;
6595 rhs.offset = UNKNOWN_OFFSET;
6596 process_constraint (new_constraint (lhs, rhs));
6597
6598 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6599 everything pointed to by escaped points to what global memory can
6600 point to. */
6601 lhs.type = DEREF;
6602 lhs.var = escaped_id;
6603 lhs.offset = 0;
6604 rhs.type = SCALAR;
6605 rhs.var = nonlocal_id;
6606 rhs.offset = 0;
6607 process_constraint (new_constraint (lhs, rhs));
6608
6609 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6610 global memory may point to global memory and escaped memory. */
6611 lhs.type = SCALAR;
6612 lhs.var = nonlocal_id;
6613 lhs.offset = 0;
6614 rhs.type = ADDRESSOF;
6615 rhs.var = nonlocal_id;
6616 rhs.offset = 0;
6617 process_constraint (new_constraint (lhs, rhs));
6618 rhs.type = ADDRESSOF;
6619 rhs.var = escaped_id;
6620 rhs.offset = 0;
6621 process_constraint (new_constraint (lhs, rhs));
6622
6623 /* Create the STOREDANYTHING variable, used to represent the set of
6624 variables stored to *ANYTHING. */
6625 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6626 gcc_assert (var_storedanything->id == storedanything_id);
6627 var_storedanything->is_artificial_var = 1;
6628 var_storedanything->offset = 0;
6629 var_storedanything->size = ~0;
6630 var_storedanything->fullsize = ~0;
6631 var_storedanything->is_special_var = 0;
6632
6633 /* Create the INTEGER variable, used to represent that a variable points
6634 to what an INTEGER "points to". */
6635 var_integer = new_var_info (NULL_TREE, "INTEGER");
6636 gcc_assert (var_integer->id == integer_id);
6637 var_integer->is_artificial_var = 1;
6638 var_integer->size = ~0;
6639 var_integer->fullsize = ~0;
6640 var_integer->offset = 0;
6641 var_integer->is_special_var = 1;
6642
6643 /* INTEGER = ANYTHING, because we don't know where a dereference of
6644 a random integer will point to. */
6645 lhs.type = SCALAR;
6646 lhs.var = integer_id;
6647 lhs.offset = 0;
6648 rhs.type = ADDRESSOF;
6649 rhs.var = anything_id;
6650 rhs.offset = 0;
6651 process_constraint (new_constraint (lhs, rhs));
6652 }
6653
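/* The INTEGER = &ANYTHING constraint matters for (hypothetical) code
   like

     int *p = (int *) some_integer;
     *p = 1;

   where the pointer is computed from an integer; the store through p
   is then treated as possibly clobbering anything.  */
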
6654 /* Initialize things necessary to perform PTA. */
6655
6656 static void
6657 init_alias_vars (void)
6658 {
6659 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6660
6661 bitmap_obstack_initialize (&pta_obstack);
6662 bitmap_obstack_initialize (&oldpta_obstack);
6663 bitmap_obstack_initialize (&predbitmap_obstack);
6664
6665 constraints.create (8);
6666 varmap.create (8);
6667 vi_for_tree = new hash_map<tree, varinfo_t>;
6668 call_stmt_vars = new hash_map<gimple, varinfo_t>;
6669
6670 memset (&stats, 0, sizeof (stats));
6671 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6672 init_base_vars ();
6673
6674 gcc_obstack_init (&fake_var_decl_obstack);
6675
6676 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6677 gcc_obstack_init (&final_solutions_obstack);
6678 }
6679
6680 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6681 predecessor edges. */
6682
6683 static void
6684 remove_preds_and_fake_succs (constraint_graph_t graph)
6685 {
6686 unsigned int i;
6687
6688 /* Clear the implicit ref and address nodes from the successor
6689 lists. */
6690 for (i = 1; i < FIRST_REF_NODE; i++)
6691 {
6692 if (graph->succs[i])
6693 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6694 FIRST_REF_NODE * 2);
6695 }
6696
6697 /* Free the successor list for the non-ref nodes. */
6698 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6699 {
6700 if (graph->succs[i])
6701 BITMAP_FREE (graph->succs[i]);
6702 }
6703
6704 /* Now shrink the successor list to the number of real variables and
6705 blow away the predecessor bitmaps. */
6706 graph->size = varmap.length ();
6707 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6708
6709 free (graph->implicit_preds);
6710 graph->implicit_preds = NULL;
6711 free (graph->preds);
6712 graph->preds = NULL;
6713 bitmap_obstack_release (&predbitmap_obstack);
6714 }
6715
6716 /* Solve the constraint set. */
6717
6718 static void
6719 solve_constraints (void)
6720 {
6721 struct scc_info *si;
6722
6723 if (dump_file)
6724 fprintf (dump_file,
6725 "\nCollapsing static cycles and doing variable "
6726 "substitution\n");
6727
6728 init_graph (varmap.length () * 2);
6729
6730 if (dump_file)
6731 fprintf (dump_file, "Building predecessor graph\n");
6732 build_pred_graph ();
6733
6734 if (dump_file)
6735 fprintf (dump_file, "Detecting pointer and location "
6736 "equivalences\n");
6737 si = perform_var_substitution (graph);
6738
6739 if (dump_file)
6740 fprintf (dump_file, "Rewriting constraints and unifying "
6741 "variables\n");
6742 rewrite_constraints (graph, si);
6743
6744 build_succ_graph ();
6745
6746 free_var_substitution_info (si);
6747
6748 /* Attach complex constraints to graph nodes. */
6749 move_complex_constraints (graph);
6750
6751 if (dump_file)
6752 fprintf (dump_file, "Uniting pointer but not location equivalent "
6753 "variables\n");
6754 unite_pointer_equivalences (graph);
6755
6756 if (dump_file)
6757 fprintf (dump_file, "Finding indirect cycles\n");
6758 find_indirect_cycles (graph);
6759
6760 /* Implicit nodes and predecessors are no longer necessary at this
6761 point. */
6762 remove_preds_and_fake_succs (graph);
6763
6764 if (dump_file && (dump_flags & TDF_GRAPH))
6765 {
6766 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6767 "in dot format:\n");
6768 dump_constraint_graph (dump_file);
6769 fprintf (dump_file, "\n\n");
6770 }
6771
6772 if (dump_file)
6773 fprintf (dump_file, "Solving graph\n");
6774
6775 solve_graph (graph);
6776
6777 if (dump_file && (dump_flags & TDF_GRAPH))
6778 {
6779 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6780 "in dot format:\n");
6781 dump_constraint_graph (dump_file);
6782 fprintf (dump_file, "\n\n");
6783 }
6784
6785 if (dump_file)
6786 dump_sa_points_to_info (dump_file);
6787 }
6788
6789 /* Create points-to sets for the current function. See the comments
6790 at the start of the file for an algorithmic overview. */
6791
6792 static void
6793 compute_points_to_sets (void)
6794 {
6795 basic_block bb;
6796 unsigned i;
6797 varinfo_t vi;
6798
6799 timevar_push (TV_TREE_PTA);
6800
6801 init_alias_vars ();
6802
6803 intra_create_variable_infos (cfun);
6804
6805 /* Now walk all statements and build the constraint set. */
6806 FOR_EACH_BB_FN (bb, cfun)
6807 {
6808 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6809 gsi_next (&gsi))
6810 {
6811 gphi *phi = gsi.phi ();
6812
6813 if (! virtual_operand_p (gimple_phi_result (phi)))
6814 find_func_aliases (cfun, phi);
6815 }
6816
6817 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
6818 gsi_next (&gsi))
6819 {
6820 gimple stmt = gsi_stmt (gsi);
6821
6822 find_func_aliases (cfun, stmt);
6823 }
6824 }
6825
6826 if (dump_file)
6827 {
6828 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6829 dump_constraints (dump_file, 0);
6830 }
6831
6832 /* From the constraints compute the points-to sets. */
6833 solve_constraints ();
6834
6835 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6836 cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
6837
6838 /* Make sure the ESCAPED solution (which is used as placeholder in
6839 other solutions) does not reference itself. This simplifies
6840 points-to solution queries. */
6841 cfun->gimple_df->escaped.escaped = 0;
6842
6843 /* Compute the points-to sets for pointer SSA_NAMEs. */
6844 for (i = 0; i < num_ssa_names; ++i)
6845 {
6846 tree ptr = ssa_name (i);
6847 if (ptr
6848 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6849 find_what_p_points_to (ptr);
6850 }
6851
6852 /* Compute the call-used/clobbered sets. */
6853 FOR_EACH_BB_FN (bb, cfun)
6854 {
6855 gimple_stmt_iterator gsi;
6856
6857 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6858 {
6859 gcall *stmt;
6860 struct pt_solution *pt;
6861
6862 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
6863 if (!stmt)
6864 continue;
6865
6866 pt = gimple_call_use_set (stmt);
6867 if (gimple_call_flags (stmt) & ECF_CONST)
6868 memset (pt, 0, sizeof (struct pt_solution));
6869 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6870 {
6871 *pt = find_what_var_points_to (vi);
6872 /* Escaped (and thus nonlocal) variables are always
6873 implicitly used by calls. */
6874 /* ??? ESCAPED can be empty even though NONLOCAL
6875 is always escaped. */
6876 pt->nonlocal = 1;
6877 pt->escaped = 1;
6878 }
6879 else
6880 {
6881 /* If there is nothing special about this call then
6882 we have made everything that is used also escape. */
6883 *pt = cfun->gimple_df->escaped;
6884 pt->nonlocal = 1;
6885 }
6886
6887 pt = gimple_call_clobber_set (stmt);
6888 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6889 memset (pt, 0, sizeof (struct pt_solution));
6890 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6891 {
6892 *pt = find_what_var_points_to (vi);
6893 /* Escaped (and thus nonlocal) variables are always
6894 implicitly clobbered by calls. */
6895 /* ??? ESCAPED can be empty even though NONLOCAL
6896 is always escaped. */
6897 pt->nonlocal = 1;
6898 pt->escaped = 1;
6899 }
6900 else
6901 {
6902 /* If there is nothing special about this call then
6903 we have made everything that is used also escape. */
6904 *pt = cfun->gimple_df->escaped;
6905 pt->nonlocal = 1;
6906 }
6907 }
6908 }
6909
6910 timevar_pop (TV_TREE_PTA);
6911 }
6912
6913
6914 /* Delete created points-to sets. */
6915
6916 static void
6917 delete_points_to_sets (void)
6918 {
6919 unsigned int i;
6920
6921 delete shared_bitmap_table;
6922 shared_bitmap_table = NULL;
6923 if (dump_file && (dump_flags & TDF_STATS))
6924 fprintf (dump_file, "Points to sets created:%d\n",
6925 stats.points_to_sets_created);
6926
6927 delete vi_for_tree;
6928 delete call_stmt_vars;
6929 bitmap_obstack_release (&pta_obstack);
6930 constraints.release ();
6931
6932 for (i = 0; i < graph->size; i++)
6933 graph->complex[i].release ();
6934 free (graph->complex);
6935
6936 free (graph->rep);
6937 free (graph->succs);
6938 free (graph->pe);
6939 free (graph->pe_rep);
6940 free (graph->indirect_cycles);
6941 free (graph);
6942
6943 varmap.release ();
6944 variable_info_pool.release ();
6945 constraint_pool.release ();
6946
6947 obstack_free (&fake_var_decl_obstack, NULL);
6948
6949 delete final_solutions;
6950 obstack_free (&final_solutions_obstack, NULL);
6951 }
6952
6953 /* Mark "other" loads and stores as belonging to CLIQUE and with
6954 base zero. */
6955
6956 static bool
6957 visit_loadstore (gimple, tree base, tree ref, void *clique_)
6958 {
6959 unsigned short clique = (uintptr_t)clique_;
6960 if (TREE_CODE (base) == MEM_REF
6961 || TREE_CODE (base) == TARGET_MEM_REF)
6962 {
6963 tree ptr = TREE_OPERAND (base, 0);
6964 if (TREE_CODE (ptr) == SSA_NAME)
6965 {
6966 /* ??? We need to make sure 'ptr' doesn't include any of
6967 the restrict tags in its points-to set. */
6968 return false;
6969 }
6970
6971 /* For now let decls through. */
6972
6973 /* Do not overwrite existing cliques (that includes clique, base
6974 pairs we just set). */
6975 if (MR_DEPENDENCE_CLIQUE (base) == 0)
6976 {
6977 MR_DEPENDENCE_CLIQUE (base) = clique;
6978 MR_DEPENDENCE_BASE (base) = 0;
6979 }
6980 }
6981
6982 /* For plain decl accesses see whether they are accesses to globals
6983 and rewrite them to MEM_REFs with { clique, 0 }. */
6984 if (TREE_CODE (base) == VAR_DECL
6985 && is_global_var (base)
6986 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
6987 ops callback. */
6988 && base != ref)
6989 {
6990 tree *basep = &ref;
6991 while (handled_component_p (*basep))
6992 basep = &TREE_OPERAND (*basep, 0);
6993 gcc_assert (TREE_CODE (*basep) == VAR_DECL);
6994 tree ptr = build_fold_addr_expr (*basep);
6995 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
6996 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
6997 MR_DEPENDENCE_CLIQUE (*basep) = clique;
6998 MR_DEPENDENCE_BASE (*basep) = 0;
6999 }
7000
7001 return false;
7002 }
7003
7004 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7005 CLIQUE, RESTRICT_VAR->ruid and LAST_RUID. Return whether dependence
7006 info was assigned to REF. */
7007
7008 static bool
7009 maybe_set_dependence_info (tree ref, tree ptr,
7010 unsigned short &clique, varinfo_t restrict_var,
7011 unsigned short &last_ruid)
7012 {
7013 while (handled_component_p (ref))
7014 ref = TREE_OPERAND (ref, 0);
7015 if ((TREE_CODE (ref) == MEM_REF
7016 || TREE_CODE (ref) == TARGET_MEM_REF)
7017 && TREE_OPERAND (ref, 0) == ptr)
7018 {
7019 /* Do not overwrite existing cliques. This avoids overwriting
7020 dependence info when a function with restrict parameters was
7021 inlined into another function with restrict parameters. It
7022 usually means we prefer to be precise in innermost loops. */
7023 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
7024 {
7025 if (clique == 0)
7026 clique = ++cfun->last_clique;
7027 if (restrict_var->ruid == 0)
7028 restrict_var->ruid = ++last_ruid;
7029 MR_DEPENDENCE_CLIQUE (ref) = clique;
7030 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7031 return true;
7032 }
7033 }
7034 return false;
7035 }
7036
7037 /* Compute the set of independent memory references based on restrict
7038 tags and their conservative propagation to the points-to sets. */
7039
7040 static void
7041 compute_dependence_clique (void)
7042 {
7043 unsigned short clique = 0;
7044 unsigned short last_ruid = 0;
7045 for (unsigned i = 0; i < num_ssa_names; ++i)
7046 {
7047 tree ptr = ssa_name (i);
7048 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7049 continue;
7050
7051 /* Avoid all this when ptr is not dereferenced? */
7052 tree p = ptr;
7053 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7054 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7055 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7056 p = SSA_NAME_VAR (ptr);
7057 varinfo_t vi = lookup_vi_for_tree (p);
7058 if (!vi)
7059 continue;
7060 vi = get_varinfo (find (vi->id));
7061 bitmap_iterator bi;
7062 unsigned j;
7063 varinfo_t restrict_var = NULL;
7064 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7065 {
7066 varinfo_t oi = get_varinfo (j);
7067 if (oi->is_restrict_var)
7068 {
7069 if (restrict_var)
7070 {
7071 if (dump_file && (dump_flags & TDF_DETAILS))
7072 {
7073 fprintf (dump_file, "found restrict pointed-to "
7074 "for ");
7075 print_generic_expr (dump_file, ptr, 0);
7076 fprintf (dump_file, " but not exclusively\n");
7077 }
7078 restrict_var = NULL;
7079 break;
7080 }
7081 restrict_var = oi;
7082 }
7083 /* NULL is the only other valid points-to entry. */
7084 else if (oi->id != nothing_id)
7085 {
7086 restrict_var = NULL;
7087 break;
7088 }
7089 }
7090 /* Ok, found that ptr must(!) point to a single(!) restrict
7091 variable. */
7092 /* ??? PTA isn't really a proper propagation engine to compute
7093 this property.
7094 ??? We could handle merging of two restricts by unifying them. */
7095 if (restrict_var)
7096 {
7097 /* Now look at possible dereferences of ptr. */
7098 imm_use_iterator ui;
7099 gimple use_stmt;
7100 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7101 {
7102 /* ??? Calls and asms. */
7103 if (!gimple_assign_single_p (use_stmt))
7104 continue;
7105 maybe_set_dependence_info (gimple_assign_lhs (use_stmt), ptr,
7106 clique, restrict_var, last_ruid);
7107 maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt), ptr,
7108 clique, restrict_var, last_ruid);
7109 }
7110 }
7111 }
7112
7113 if (clique == 0)
7114 return;
7115
7116 /* Assign the BASE id zero to all accesses not based on a restrict
7117 pointer. That way they get disambiguated against restrict
7118 accesses but not against each other. */
7119 /* ??? For restricts derived from globals (thus not incoming
7120 parameters) we can't restrict scoping properly thus the following
7121 is too aggressive there. For now we have excluded those globals from
7122 getting into the MR_DEPENDENCE machinery. */
7123 basic_block bb;
7124 FOR_EACH_BB_FN (bb, cfun)
7125 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7126 !gsi_end_p (gsi); gsi_next (&gsi))
7127 {
7128 gimple stmt = gsi_stmt (gsi);
7129 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t)clique,
7130 visit_loadstore, visit_loadstore);
7131 }
7132 }
7133
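/* As a sketch of the end result, for the (hypothetical) function

     void f (int * restrict a, int * restrict b)
     {
       *a = *b;
     }

   both dereferences receive the same nonzero clique, with distinct
   bases derived from the two restrict tags, so the store to *a and
   the load from *b can be disambiguated via MR_DEPENDENCE_CLIQUE
   and MR_DEPENDENCE_BASE.  */
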
7134 /* Compute points-to information for every SSA_NAME pointer in the
7135 current function and compute the transitive closure of escaped
7136 variables to re-initialize the call-clobber states of local variables. */
7137
7138 unsigned int
7139 compute_may_aliases (void)
7140 {
7141 if (cfun->gimple_df->ipa_pta)
7142 {
7143 if (dump_file)
7144 {
7145 fprintf (dump_file, "\nNot re-computing points-to information "
7146 "because IPA points-to information is available.\n\n");
7147
7148 /* But still dump what we have remaining. */
7149 dump_alias_info (dump_file);
7150 }
7151
7152 return 0;
7153 }
7154
7155 /* For each pointer P_i, determine the sets of variables that P_i may
7156 point-to. Compute the reachability set of escaped and call-used
7157 variables. */
7158 compute_points_to_sets ();
7159
7160 /* Debugging dumps. */
7161 if (dump_file)
7162 dump_alias_info (dump_file);
7163
7164 /* Compute restrict-based memory disambiguations. */
7165 compute_dependence_clique ();
7166
7167 /* Deallocate memory used by aliasing data structures and the internal
7168 points-to solution. */
7169 delete_points_to_sets ();
7170
7171 gcc_assert (!need_ssa_update_p (cfun));
7172
7173 return 0;
7174 }
7175
7176 /* A dummy pass to cause points-to information to be computed via
7177 TODO_rebuild_alias. */
7178
7179 namespace {
7180
7181 const pass_data pass_data_build_alias =
7182 {
7183 GIMPLE_PASS, /* type */
7184 "alias", /* name */
7185 OPTGROUP_NONE, /* optinfo_flags */
7186 TV_NONE, /* tv_id */
7187 ( PROP_cfg | PROP_ssa ), /* properties_required */
7188 0, /* properties_provided */
7189 0, /* properties_destroyed */
7190 0, /* todo_flags_start */
7191 TODO_rebuild_alias, /* todo_flags_finish */
7192 };
7193
7194 class pass_build_alias : public gimple_opt_pass
7195 {
7196 public:
7197 pass_build_alias (gcc::context *ctxt)
7198 : gimple_opt_pass (pass_data_build_alias, ctxt)
7199 {}
7200
7201 /* opt_pass methods: */
7202 virtual bool gate (function *) { return flag_tree_pta; }
7203
7204 }; // class pass_build_alias
7205
7206 } // anon namespace
7207
7208 gimple_opt_pass *
7209 make_pass_build_alias (gcc::context *ctxt)
7210 {
7211 return new pass_build_alias (ctxt);
7212 }
7213
7214 /* A dummy pass to cause points-to information to be computed via
7215 TODO_rebuild_alias. */
7216
7217 namespace {
7218
7219 const pass_data pass_data_build_ealias =
7220 {
7221 GIMPLE_PASS, /* type */
7222 "ealias", /* name */
7223 OPTGROUP_NONE, /* optinfo_flags */
7224 TV_NONE, /* tv_id */
7225 ( PROP_cfg | PROP_ssa ), /* properties_required */
7226 0, /* properties_provided */
7227 0, /* properties_destroyed */
7228 0, /* todo_flags_start */
7229 TODO_rebuild_alias, /* todo_flags_finish */
7230 };
7231
7232 class pass_build_ealias : public gimple_opt_pass
7233 {
7234 public:
7235 pass_build_ealias (gcc::context *ctxt)
7236 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7237 {}
7238
7239 /* opt_pass methods: */
7240 virtual bool gate (function *) { return flag_tree_pta; }
7241
7242 }; // class pass_build_ealias
7243
7244 } // anon namespace
7245
7246 gimple_opt_pass *
7247 make_pass_build_ealias (gcc::context *ctxt)
7248 {
7249 return new pass_build_ealias (ctxt);
7250 }
7251
7252
7253 /* IPA PTA solution for ESCAPED. */
7254 struct pt_solution ipa_escaped_pt
7255 = { true, false, false, false, false, false, false, false, NULL };
7256
7257 /* Associate NODE with the varinfo DATA. Worker for
7258 cgraph_node::call_for_symbol_thunks_and_aliases. */
7259 static bool
7260 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7261 {
7262 if ((node->alias || node->thunk.thunk_p)
7263 && node->analyzed)
7264 insert_vi_for_tree (node->decl, (varinfo_t)data);
7265 return false;
7266 }
7267
7268 /* Execute the driver for IPA PTA. */
7269 static unsigned int
7270 ipa_pta_execute (void)
7271 {
7272 struct cgraph_node *node;
7273 varpool_node *var;
7274 int from;
7275
7276 in_ipa_mode = 1;
7277
7278 init_alias_vars ();
7279
7280 if (dump_file && (dump_flags & TDF_DETAILS))
7281 {
7282 symtab_node::dump_table (dump_file);
7283 fprintf (dump_file, "\n");
7284 }
7285
7286 /* Build the constraints. */
7287 FOR_EACH_DEFINED_FUNCTION (node)
7288 {
7289 varinfo_t vi;
7290 /* Nodes without a body are not interesting. Especially do not
7291 visit clones at this point for now - we get duplicate decls
7292 there for inline clones at least. */
7293 if (!node->has_gimple_body_p () || node->global.inlined_to)
7294 continue;
7295 node->get_body ();
7296
7297 gcc_assert (!node->clone_of);
7298
7299 vi = create_function_info_for (node->decl,
7300 alias_get_name (node->decl));
7301 node->call_for_symbol_thunks_and_aliases
7302 (associate_varinfo_to_alias, vi, true);
7303 }
7304
7305 /* Create constraints for global variables and their initializers. */
7306 FOR_EACH_VARIABLE (var)
7307 {
7308 if (var->alias && var->analyzed)
7309 continue;
7310
7311 get_vi_for_tree (var->decl);
7312 }
7313
7314 if (dump_file)
7315 {
7316 fprintf (dump_file,
7317 "Generating constraints for global initializers\n\n");
7318 dump_constraints (dump_file, 0);
7319 fprintf (dump_file, "\n");
7320 }
7321 from = constraints.length ();
7322
7323 FOR_EACH_DEFINED_FUNCTION (node)
7324 {
7325 struct function *func;
7326 basic_block bb;
7327
7328 /* Nodes without a body are not interesting. */
7329 if (!node->has_gimple_body_p () || node->clone_of)
7330 continue;
7331
7332 if (dump_file)
7333 {
7334 fprintf (dump_file,
7335 "Generating constraints for %s", node->name ());
7336 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7337 fprintf (dump_file, " (%s)",
7338 IDENTIFIER_POINTER
7339 (DECL_ASSEMBLER_NAME (node->decl)));
7340 fprintf (dump_file, "\n");
7341 }
7342
7343 func = DECL_STRUCT_FUNCTION (node->decl);
7344 gcc_assert (cfun == NULL);
7345
7346 /* For externally visible functions, or functions annotated with
7347 attribute used, use local constraints for their arguments.
7348 For local functions we see all callers and thus do not need initial
7349 constraints for parameters. */
7350 if (node->used_from_other_partition
7351 || node->externally_visible
7352 || node->force_output
7353 || node->address_taken)
7354 {
7355 intra_create_variable_infos (func);
7356
7357 /* We also need to make function return values escape. Nothing
7358 escapes by returning from main though. */
7359 if (!MAIN_NAME_P (DECL_NAME (node->decl)))
7360 {
7361 varinfo_t fi, rvi;
7362 fi = lookup_vi_for_tree (node->decl);
7363 rvi = first_vi_for_offset (fi, fi_result);
7364 if (rvi && rvi->offset == fi_result)
7365 {
7366 struct constraint_expr includes;
7367 struct constraint_expr var;
7368 includes.var = escaped_id;
7369 includes.offset = 0;
7370 includes.type = SCALAR;
7371 var.var = rvi->id;
7372 var.offset = 0;
7373 var.type = SCALAR;
7374 process_constraint (new_constraint (includes, var));
7375 }
7376 }
7377 }
7378
7379 /* Build constraints for the function body. */
7380 FOR_EACH_BB_FN (bb, func)
7381 {
7382 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7383 gsi_next (&gsi))
7384 {
7385 gphi *phi = gsi.phi ();
7386
7387 if (! virtual_operand_p (gimple_phi_result (phi)))
7388 find_func_aliases (func, phi);
7389 }
7390
7391 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7392 gsi_next (&gsi))
7393 {
7394 gimple stmt = gsi_stmt (gsi);
7395
7396 find_func_aliases (func, stmt);
7397 find_func_clobbers (func, stmt);
7398 }
7399 }
7400
7401 if (dump_file)
7402 {
7403 fprintf (dump_file, "\n");
7404 dump_constraints (dump_file, from);
7405 fprintf (dump_file, "\n");
7406 }
7407 from = constraints.length ();
7408 }
7409
7410 /* From the constraints compute the points-to sets. */
7411 solve_constraints ();
7412
7413 /* Compute the global points-to sets for ESCAPED.
7414 ??? Note that the computed escape set is not correct
7415 for the whole unit as we fail to consider graph edges to
7416 externally visible functions. */
7417 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7418
7419 /* Make sure the ESCAPED solution (which is used as placeholder in
7420 other solutions) does not reference itself. This simplifies
7421 points-to solution queries. */
7422 ipa_escaped_pt.ipa_escaped = 0;
7423
7424 /* Assign the points-to sets to the SSA names in the unit. */
7425 FOR_EACH_DEFINED_FUNCTION (node)
7426 {
7427 tree ptr;
7428 struct function *fn;
7429 unsigned i;
7430 basic_block bb;
7431
7432 /* Nodes without a body are not interesting. */
7433 if (!node->has_gimple_body_p () || node->clone_of)
7434 continue;
7435
7436 fn = DECL_STRUCT_FUNCTION (node->decl);
7437
7438 /* Compute the points-to sets for pointer SSA_NAMEs. */
7439 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7440 {
7441 if (ptr
7442 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7443 find_what_p_points_to (ptr);
7444 }
7445
7446 /* Compute the call-use and call-clobber sets for indirect calls
7447 and calls to external functions. */
7448 FOR_EACH_BB_FN (bb, fn)
7449 {
7450 gimple_stmt_iterator gsi;
7451
7452 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7453 {
7454 gcall *stmt;
7455 struct pt_solution *pt;
7456 varinfo_t vi, fi;
7457 tree decl;
7458
7459 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7460 if (!stmt)
7461 continue;
7462
7463 /* Handle direct calls to functions with body. */
7464 decl = gimple_call_fndecl (stmt);
7465 if (decl
7466 && (fi = lookup_vi_for_tree (decl))
7467 && fi->is_fn_info)
7468 {
7469 *gimple_call_clobber_set (stmt)
7470 = find_what_var_points_to
7471 (first_vi_for_offset (fi, fi_clobbers));
7472 *gimple_call_use_set (stmt)
7473 = find_what_var_points_to
7474 (first_vi_for_offset (fi, fi_uses));
7475 }
7476 /* Handle direct calls to external functions. */
7477 else if (decl)
7478 {
7479 pt = gimple_call_use_set (stmt);
7480 if (gimple_call_flags (stmt) & ECF_CONST)
7481 memset (pt, 0, sizeof (struct pt_solution));
7482 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7483 {
7484 *pt = find_what_var_points_to (vi);
7485 /* Escaped (and thus nonlocal) variables are always
7486 implicitly used by calls. */
7487 /* ??? ESCAPED can be empty even though NONLOCAL
7488 is always escaped. */
7489 pt->nonlocal = 1;
7490 pt->ipa_escaped = 1;
7491 }
7492 else
7493 {
7494 /* If there is nothing special about this call then
7495 we have made everything that is used also escape. */
7496 *pt = ipa_escaped_pt;
7497 pt->nonlocal = 1;
7498 }
7499
7500 pt = gimple_call_clobber_set (stmt);
7501 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7502 memset (pt, 0, sizeof (struct pt_solution));
7503 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7504 {
7505 *pt = find_what_var_points_to (vi);
7506 /* Escaped (and thus nonlocal) variables are always
7507 implicitly clobbered by calls. */
7508 /* ??? ESCAPED can be empty even though NONLOCAL
7509 is always escaped. */
7510 pt->nonlocal = 1;
7511 pt->ipa_escaped = 1;
7512 }
7513 else
7514 {
7515 /* If there is nothing special about this call then
7516 we have made everything that is used also escape. */
7517 *pt = ipa_escaped_pt;
7518 pt->nonlocal = 1;
7519 }
7520 }
7521 /* Handle indirect calls. */
7522 else if (!decl
7523 && (fi = get_fi_for_callee (stmt)))
7524 {
7525 /* We need to accumulate all clobbers/uses of all possible
7526 callees. */
7527 fi = get_varinfo (find (fi->id));
7528 /* If we cannot constrain the set of functions we'll end up
7529 calling we end up using/clobbering everything. */
7530 if (bitmap_bit_p (fi->solution, anything_id)
7531 || bitmap_bit_p (fi->solution, nonlocal_id)
7532 || bitmap_bit_p (fi->solution, escaped_id))
7533 {
7534 pt_solution_reset (gimple_call_clobber_set (stmt));
7535 pt_solution_reset (gimple_call_use_set (stmt));
7536 }
7537 else
7538 {
7539 bitmap_iterator bi;
7540 unsigned i;
7541 struct pt_solution *uses, *clobbers;
7542
7543 uses = gimple_call_use_set (stmt);
7544 clobbers = gimple_call_clobber_set (stmt);
7545 memset (uses, 0, sizeof (struct pt_solution));
7546 memset (clobbers, 0, sizeof (struct pt_solution));
7547 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7548 {
7549 struct pt_solution sol;
7550
7551 vi = get_varinfo (i);
7552 if (!vi->is_fn_info)
7553 {
7554 /* ??? We could be more precise here? */
7555 uses->nonlocal = 1;
7556 uses->ipa_escaped = 1;
7557 clobbers->nonlocal = 1;
7558 clobbers->ipa_escaped = 1;
7559 continue;
7560 }
7561
7562 if (!uses->anything)
7563 {
7564 sol = find_what_var_points_to
7565 (first_vi_for_offset (vi, fi_uses));
7566 pt_solution_ior_into (uses, &sol);
7567 }
7568 if (!clobbers->anything)
7569 {
7570 sol = find_what_var_points_to
7571 (first_vi_for_offset (vi, fi_clobbers));
7572 pt_solution_ior_into (clobbers, &sol);
7573 }
7574 }
7575 }
7576 }
7577 }
7578 }
7579
7580 fn->gimple_df->ipa_pta = true;
7581 }
7582
7583 delete_points_to_sets ();
7584
7585 in_ipa_mode = 0;
7586
7587 return 0;
7588 }
7589
7590 namespace {
7591
7592 const pass_data pass_data_ipa_pta =
7593 {
7594 SIMPLE_IPA_PASS, /* type */
7595 "pta", /* name */
7596 OPTGROUP_NONE, /* optinfo_flags */
7597 TV_IPA_PTA, /* tv_id */
7598 0, /* properties_required */
7599 0, /* properties_provided */
7600 0, /* properties_destroyed */
7601 0, /* todo_flags_start */
7602 0, /* todo_flags_finish */
7603 };
7604
7605 class pass_ipa_pta : public simple_ipa_opt_pass
7606 {
7607 public:
7608 pass_ipa_pta (gcc::context *ctxt)
7609 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
7610 {}
7611
7612 /* opt_pass methods: */
7613 virtual bool gate (function *)
7614 {
7615 return (optimize
7616 && flag_ipa_pta
7617 /* Don't bother doing anything if the program has errors. */
7618 && !seen_error ());
7619 }
7620
7621 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
7622
7623 }; // class pass_ipa_pta
7624
7625 } // anon namespace
7626
7627 simple_ipa_opt_pass *
7628 make_pass_ipa_pta (gcc::context *ctxt)
7629 {
7630 return new pass_ipa_pta (ctxt);
7631 }