/* Tree based points-to analysis
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "obstack.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "flags.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "double-int.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stmt.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pretty-print.h"
#include "gimple-walk.h"

/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

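   For example, writing a constraint expression informally as the triple
   (type, variable, offset), the statement x = *y gives rise to

     (SCALAR, x, 0) = (DEREF, y, 0)

   and *x = &y to

     (DEREF, x, 0) = (ADDRESSOF, y, 0).
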
   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

     1. "size", that tells the size in bits of that field.
     2. "fullsize", that tells the size in bits of the entire structure.
     3. "offset", that tells the offset in bits from the beginning of the
	structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph,
   or the appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

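   For example, given the constraints P = &Q and R = P, step 3 puts Q
   into Sol(P), step 5 adds a copy edge from P to R, and step 6
   propagates along that edge so that Sol(R) also contains Q.
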
   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */

/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it to is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.  This is
   also needed to fix the pt_solution_includes_global predicate
   (and thus ptr_deref_may_alias_global_p).

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution
   propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;

struct variable_info
{
  /* ID of this variable. */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up. */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed. */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable. */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable. */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable. */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers. */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers. */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer. */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable. */
  unsigned int is_global_var : 1;

  /* True if this represents an IPA function info. */
  unsigned int is_fn_info : 1;

  /* ??? Store somewhere better. */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure. */
  unsigned next;

  /* The ID of the variable for the first field in this structure. */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable. */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits. */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits. */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable. */
  const char *name;

  /* Tree that this variable is associated with. */
  tree decl;

  /* Points-to set for this variable. */
  bitmap solution;

  /* Old points-to set for this variable. */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures. */
static alloc_pool variable_info_pool;

/* Map varinfo to final pt_solution. */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id. */
static vec<varinfo_t> varmap;

/* Return the varmap element N. */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}

/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable. */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}

/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain. */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };

/* Return a new variable info structure for a variable named NAME and
   associated with tree T.  Append it to the vector of variable info
   structures. */

static varinfo_t
new_var_info (tree t, const char *name)
{
  unsigned index = varmap.length ();
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables. */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_restrict_var = false;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points. */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = NULL;
  ret->next = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}


/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call. */
static hash_map<gimple, varinfo_t> *call_stmt_vars;

/* Lookup or create the variable for the call statement CALL. */
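/* The use variable is created first, with the clobber variable chained
   directly after it through the next field; lookup_call_clobber_vi and
   get_call_clobber_vi below rely on this layout when they return
   vi_next of the use variable. */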

static varinfo_t
get_call_vi (gcall *call)
{
  varinfo_t vi, vi2;

  bool existed;
  varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
  if (existed)
    return *slot_p;

  vi = new_var_info (NULL_TREE, "CALLUSED");
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;

  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;

  vi->next = vi2->id;

  *slot_p = vi;
  return vi;
}

/* Lookup the variable for the call statement CALL representing
   the uses.  Returns NULL if there is nothing special about this call. */

static varinfo_t
lookup_call_use_vi (gcall *call)
{
  varinfo_t *slot_p = call_stmt_vars->get (call);
  if (slot_p)
    return *slot_p;

  return NULL;
}

/* Lookup the variable for the call statement CALL representing
   the clobbers.  Returns NULL if there is nothing special about this call. */

static varinfo_t
lookup_call_clobber_vi (gcall *call)
{
  varinfo_t uses = lookup_call_use_vi (call);
  if (!uses)
    return NULL;

  return vi_next (uses);
}

/* Lookup or create the variable for the call statement CALL representing
   the uses. */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}

/* Lookup or create the variable for the call statement CALL representing
   the clobbers. */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (get_call_vi (call));
}


typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint. */

struct constraint_expr
{
  /* Constraint type. */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint. */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member. */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset. */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);

/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set-valued variables.  */

struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from. */

static vec<constraint_t> constraints;
static alloc_pool constraint_pool;

/* The constraint graph is represented as an array of bitmaps
   containing successor nodes. */

struct constraint_graph
{
  /* Size of this graph, which may be different from the number of
     nodes in the variable map. */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution). */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles. */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution. */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built). */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets. */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution. */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding. */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph. */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes. */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution. */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution. */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0. */
  vec<constraint_t> *complex;
};

static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end. */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
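
/* For example, with four entries in varmap, the ref node for variable i
   is FIRST_REF_NODE + i, so the ref nodes occupy ids 4 (FIRST_REF_NODE)
   through 7 (LAST_REF_NODE). */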

/* Return the representative node for NODE, if NODE has been unioned
   with another NODE.
   This function performs path compression along the way to finding
   the representative. */
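/* For instance, if the representative chain is 3 -> 2 -> 1 with
   rep[1] == 1, find (3) returns 1 and rewrites rep[3] and rep[2]
   to 1, so later lookups take a single step. */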

static unsigned int
find (unsigned int node)
{
  gcc_checking_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}

/* Union the FROM and TO nodes into the TO node.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to. */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_checking_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}

/* Create a new constraint consisting of LHS and RHS expressions. */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE. */

static void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
  if (c->lhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
  if (c->rhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
}


void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);

/* Print out constraint C to stderr. */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}

/* Print out all constraints to FILE. */

static void
dump_constraints (FILE *file, int from)
{
  int i;
  constraint_t c;
  for (i = from; constraints.iterate (i, &c); i++)
    if (c)
      {
	dump_constraint (file, c);
	fprintf (file, "\n");
      }
}

/* Print out all constraints to stderr. */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}

/* Print the constraint graph in dot format. */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized: */
  if (!graph)
    return;

  /* Prints the header of the dot file: */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them. */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges. */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file. */
  fprintf (file, "}\n");
}

/* Print out the constraint graph to stderr. */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}

/* SOLVER FUNCTIONS

   The solver is a simple worklist solver, that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   } */

/* Return true if two constraint expressions A and B are equal. */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
}

/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering. */

static bool
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
{
  if (a.type == b.type)
    {
      if (a.var == b.var)
	return a.offset < b.offset;
      else
	return a.var < b.var;
    }
  else
    return a.type < b.type;
}

/* Return true if constraint A is less than constraint B.  This is just
   arbitrary, but consistent, in order to give them an ordering. */

static bool
constraint_less (const constraint_t &a, const constraint_t &b)
{
  if (constraint_expr_less (a->lhs, b->lhs))
    return true;
  else if (constraint_expr_less (b->lhs, a->lhs))
    return false;
  else
    return constraint_expr_less (a->rhs, b->rhs);
}

/* Return true if two constraints A and B are equal. */

static bool
constraint_equal (struct constraint a, struct constraint b)
{
  return constraint_expr_equal (a.lhs, b.lhs)
    && constraint_expr_equal (a.rhs, b.rhs);
}


/* Find a constraint LOOKFOR in the sorted constraint vector VEC. */

static constraint_t
constraint_vec_find (vec<constraint_t> vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (!vec.exists ())
    return NULL;

  place = vec.lower_bound (&lookfor, constraint_less);
  if (place >= vec.length ())
    return NULL;
  found = vec[place];
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}

/* Union two constraint vectors, TO and FROM.  Put the result in TO.
   Returns true if the TO set is changed. */

static bool
constraint_set_union (vec<constraint_t> *to,
		      vec<constraint_t> *from)
{
  int i;
  constraint_t c;
  bool any_change = false;

  FOR_EACH_VEC_ELT (*from, i, c)
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  unsigned int place = to->lower_bound (c, constraint_less);
	  to->safe_insert (place, c);
	  any_change = true;
	}
    }
  return any_change;
}

/* Expands the solution in SET to all sub-fields of variables included. */
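/* With the earlier struct f example, if SET contains only foo.b the
   expanded set contains foo.a and foo.b, i.e. every sub-field of foo. */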

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields of.  This avoids quadratic behavior. */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields. */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET. */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}

/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process. */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO. */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields. */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination. */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result. */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup. */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc. */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}

/* Insert constraint C into the list of complex constraints for graph
   node VAR. */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  vec<constraint_t> complex = graph->complex[var];
  unsigned int place = complex.lower_bound (c, constraint_less);

  /* Only insert constraints that do not already exist. */
  if (place >= complex.length ()
      || !constraint_equal (*c, *complex[place]))
    graph->complex[var].safe_insert (place, c);
}


/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge. */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node. */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM or *FROM = a, or an offsetted constraint, which is
	 always added to the rhs node's constraints. */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}


/* Remove edges involving NODE from GRAPH. */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}

/* Merge GRAPH nodes FROM and TO into node TO. */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other. */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges. */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}


/* Add an implicit predecessor edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already. */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (to == from)
    return;

  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}

/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already. */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}

/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise. */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}


/* Initialize the constraint graph structure to contain SIZE nodes. */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it. */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}

/* Build the constraint graph, adding only predecessor edges right now. */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y. */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y. */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y. */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y. */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes. */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y. */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y. */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}

/* Build the constraint graph, adding successor edges. */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y. */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers. */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes. */
  add_graph_edge (graph, find (escaped_id), t);
}


/* Changed variables on the last iteration. */
static bitmap changed;

/* Strongly Connected Component visitation info. */

struct scc_info
{
  sbitmap visited;
  sbitmap deleted;
  unsigned int *dfs;
  unsigned int *node_mapping;
  int current_index;
  vec<unsigned> scc_stack;
};


/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of the
   current graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14. */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index++;
  my_dfs = si->dfs[n];

  /* Visit all the successors. */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified. */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles. */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}

/* Unify node FROM into node TO, updating the changed bitmap if
   necessary when UPDATE_CHANGED is true. */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed. */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed. */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}

/* Information needed to compute the topological ordering of a graph. */

struct topo_info
{
  /* sbitmap of visited nodes. */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*. */
  vec<unsigned> topo_order;
};


/* Initialize and return a topological info structure. */

static struct topo_info *
init_topo_info (void)
{
  size_t size = graph->size;
  struct topo_info *ti = XNEW (struct topo_info);
  ti->visited = sbitmap_alloc (size);
  bitmap_clear (ti->visited);
  ti->topo_order.create (1);
  return ti;
}


/* Free the topological sort info pointed to by TI. */

static void
free_topo_info (struct topo_info *ti)
{
  sbitmap_free (ti->visited);
  ti->topo_order.release ();
  free (ti);
}

/* Visit the graph in topological order, and store the order in the
   topo_info structure. */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  ti->topo_order.safe_push (n);
}

/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y. */
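/* For instance, for x = *y with Sol(y) = {a, b} and offset 0, this adds
   the graph edges a -> x and b -> x and unions Sol(a) and Sol(b) into
   Sol(x). */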
1624
1625 static void
1626 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1627 bitmap delta, bitmap *expanded_delta)
1628 {
1629 unsigned int lhs = c->lhs.var;
1630 bool flag = false;
1631 bitmap sol = get_varinfo (lhs)->solution;
1632 unsigned int j;
1633 bitmap_iterator bi;
1634 HOST_WIDE_INT roffset = c->rhs.offset;
1635
1636 /* Our IL does not allow this. */
1637 gcc_checking_assert (c->lhs.offset == 0);
1638
1639 /* If the solution of Y contains anything it is good enough to transfer
1640 this to the LHS. */
1641 if (bitmap_bit_p (delta, anything_id))
1642 {
1643 flag |= bitmap_set_bit (sol, anything_id);
1644 goto done;
1645 }
1646
1647 /* If we do not know at with offset the rhs is dereferenced compute
1648 the reachability set of DELTA, conservatively assuming it is
1649 dereferenced at all valid offsets. */
1650 if (roffset == UNKNOWN_OFFSET)
1651 {
1652 delta = solution_set_expand (delta, expanded_delta);
1653 /* No further offset processing is necessary. */
1654 roffset = 0;
1655 }
1656
1657 /* For each variable j in delta (Sol(y)), add
1658 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1659 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1660 {
1661 varinfo_t v = get_varinfo (j);
1662 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1663 unsigned HOST_WIDE_INT size = v->size;
1664 unsigned int t;
1665
1666 if (v->is_full_var)
1667 ;
1668 else if (roffset != 0)
1669 {
1670 if (fieldoffset < 0)
1671 v = get_varinfo (v->head);
1672 else
1673 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1674 }
1675
1676 /* We have to include all fields that overlap the current field
1677 shifted by roffset. */
1678 do
1679 {
1680 t = find (v->id);
1681
1682 /* Adding edges from the special vars is pointless.
1683 They don't have sets that can change. */
1684 if (get_varinfo (t)->is_special_var)
1685 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1686 /* Merging the solution from ESCAPED needlessly increases
1687 the set. Use ESCAPED as representative instead. */
1688 else if (v->id == escaped_id)
1689 flag |= bitmap_set_bit (sol, escaped_id);
1690 else if (v->may_have_pointers
1691 && add_graph_edge (graph, lhs, t))
1692 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1693
1694 if (v->is_full_var
1695 || v->next == 0)
1696 break;
1697
1698 v = vi_next (v);
1699 }
1700 while (v->offset < fieldoffset + size);
1701 }
1702
1703 done:
1704 /* If the LHS solution changed, mark the var as changed. */
1705 if (flag)
1706 {
1707 get_varinfo (lhs)->solution = sol;
1708 bitmap_set_bit (changed, lhs);
1709 }
1710 }
1711
1712 /* Process a constraint C that represents *(x + off) = y using DELTA
1713 as the starting solution for x. */
1714
1715 static void
1716 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1717 {
1718 unsigned int rhs = c->rhs.var;
1719 bitmap sol = get_varinfo (rhs)->solution;
1720 unsigned int j;
1721 bitmap_iterator bi;
1722 HOST_WIDE_INT loff = c->lhs.offset;
1723 bool escaped_p = false;
1724
1725 /* Our IL does not allow this. */
1726 gcc_checking_assert (c->rhs.offset == 0);
1727
1728 /* If the solution of y contains ANYTHING simply use the ANYTHING
1729 solution. This avoids needlessly increasing the points-to sets. */
1730 if (bitmap_bit_p (sol, anything_id))
1731 sol = get_varinfo (find (anything_id))->solution;
1732
1733 /* If the solution for x contains ANYTHING we have to merge the
1734 solution of y into all pointer variables which we do via
1735 STOREDANYTHING. */
1736 if (bitmap_bit_p (delta, anything_id))
1737 {
1738 unsigned t = find (storedanything_id);
1739 if (add_graph_edge (graph, t, rhs))
1740 {
1741 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1742 bitmap_set_bit (changed, t);
1743 }
1744 return;
1745 }
1746
1747 /* If we do not know at with offset the rhs is dereferenced compute
1748 the reachability set of DELTA, conservatively assuming it is
1749 dereferenced at all valid offsets. */
1750 if (loff == UNKNOWN_OFFSET)
1751 {
1752 delta = solution_set_expand (delta, expanded_delta);
1753 loff = 0;
1754 }
1755
1756 /* For each member j of delta (Sol(x)), add an edge from y to j and
1757 union Sol(y) into Sol(j) */
1758 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1759 {
1760 varinfo_t v = get_varinfo (j);
1761 unsigned int t;
1762 HOST_WIDE_INT fieldoffset = v->offset + loff;
1763 unsigned HOST_WIDE_INT size = v->size;
1764
1765 if (v->is_full_var)
1766 ;
1767 else if (loff != 0)
1768 {
1769 if (fieldoffset < 0)
1770 v = get_varinfo (v->head);
1771 else
1772 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1773 }
1774
1775 /* We have to include all fields that overlap the current field
1776 shifted by loff. */
1777 do
1778 {
1779 if (v->may_have_pointers)
1780 {
1781 /* If v is a global variable then this is an escape point. */
1782 if (v->is_global_var
1783 && !escaped_p)
1784 {
1785 t = find (escaped_id);
1786 if (add_graph_edge (graph, t, rhs)
1787 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1788 bitmap_set_bit (changed, t);
1789 /* Enough to let rhs escape once. */
1790 escaped_p = true;
1791 }
1792
1793 if (v->is_special_var)
1794 break;
1795
1796 t = find (v->id);
1797 if (add_graph_edge (graph, t, rhs)
1798 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1799 bitmap_set_bit (changed, t);
1800 }
1801
1802 if (v->is_full_var
1803 || v->next == 0)
1804 break;
1805
1806 v = vi_next (v);
1807 }
1808 while (v->offset < fieldoffset + size);
1809 }
1810 }
1811
1812 /* Handle a non-simple (simple meaning requires no iteration),
1813 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1814
1815 static void
1816 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1817 bitmap *expanded_delta)
1818 {
1819 if (c->lhs.type == DEREF)
1820 {
1821 if (c->rhs.type == ADDRESSOF)
1822 {
1823 gcc_unreachable ();
1824 }
1825 else
1826 {
1827 /* *x = y */
1828 do_ds_constraint (c, delta, expanded_delta);
1829 }
1830 }
1831 else if (c->rhs.type == DEREF)
1832 {
1833 /* x = *y */
1834 if (!(get_varinfo (c->lhs.var)->is_special_var))
1835 do_sd_constraint (graph, c, delta, expanded_delta);
1836 }
1837 else
1838 {
1839 bitmap tmp;
1840 bool flag = false;
1841
1842 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1843 && c->rhs.offset != 0 && c->lhs.offset == 0);
1844 tmp = get_varinfo (c->lhs.var)->solution;
1845
1846 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1847 expanded_delta);
1848
1849 if (flag)
1850 bitmap_set_bit (changed, c->lhs.var);
1851 }
1852 }
1853
1854 /* Initialize and return a new SCC info structure. */
1855
1856 static struct scc_info *
1857 init_scc_info (size_t size)
1858 {
1859 struct scc_info *si = XNEW (struct scc_info);
1860 size_t i;
1861
1862 si->current_index = 0;
1863 si->visited = sbitmap_alloc (size);
1864 bitmap_clear (si->visited);
1865 si->deleted = sbitmap_alloc (size);
1866 bitmap_clear (si->deleted);
1867 si->node_mapping = XNEWVEC (unsigned int, size);
1868 si->dfs = XCNEWVEC (unsigned int, size);
1869
1870 for (i = 0; i < size; i++)
1871 si->node_mapping[i] = i;
1872
1873 si->scc_stack.create (1);
1874 return si;
1875 }
1876
1877 /* Free an SCC info structure pointed to by SI */
1878
1879 static void
1880 free_scc_info (struct scc_info *si)
1881 {
1882 sbitmap_free (si->visited);
1883 sbitmap_free (si->deleted);
1884 free (si->node_mapping);
1885 free (si->dfs);
1886 si->scc_stack.release ();
1887 free (si);
1888 }
1889
1890
1891 /* Find indirect cycles in GRAPH that occur, using strongly connected
1892 components, and note them in the indirect cycles map.
1893
1894 This technique comes from Ben Hardekopf and Calvin Lin,
1895 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1896 Lines of Code", submitted to PLDI 2007. */
1897
1898 static void
1899 find_indirect_cycles (constraint_graph_t graph)
1900 {
1901 unsigned int i;
1902 unsigned int size = graph->size;
1903 struct scc_info *si = init_scc_info (size);
1904
1905 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1906 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1907 scc_visit (graph, si, i);
1908
1909 free_scc_info (si);
1910 }
1911
1912 /* Compute a topological ordering for GRAPH, and store the result in the
1913 topo_info structure TI. */
1914
1915 static void
1916 compute_topo_order (constraint_graph_t graph,
1917 struct topo_info *ti)
1918 {
1919 unsigned int i;
1920 unsigned int size = graph->size;
1921
1922 for (i = 0; i != size; ++i)
1923 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1924 topo_visit (graph, ti, i);
1925 }
1926
1927 /* Structure used for hash value numbering of pointer equivalence
1928 classes. */
1929
1930 typedef struct equiv_class_label
1931 {
1932 hashval_t hashcode;
1933 unsigned int equivalence_class;
1934 bitmap labels;
1935 } *equiv_class_label_t;
1936 typedef const struct equiv_class_label *const_equiv_class_label_t;
1937
1938 /* Equiv_class_label hashtable helpers. */
1939
1940 struct equiv_class_hasher : typed_free_remove <equiv_class_label>
1941 {
1942 typedef equiv_class_label value_type;
1943 typedef equiv_class_label compare_type;
1944 static inline hashval_t hash (const value_type *);
1945 static inline bool equal (const value_type *, const compare_type *);
1946 };
1947
1948 /* Hash function for an equiv_class_label_t. */
1949
1950 inline hashval_t
1951 equiv_class_hasher::hash (const value_type *ecl)
1952 {
1953 return ecl->hashcode;
1954 }
1955
1956 /* Equality function for two equiv_class_label_t's. */
1957
1958 inline bool
1959 equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
1960 {
1961 return (eql1->hashcode == eql2->hashcode
1962 && bitmap_equal_p (eql1->labels, eql2->labels));
1963 }
1964
1965 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1966 classes. */
1967 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1968
1969 /* A hashtable for mapping a bitmap of labels->location equivalence
1970 classes. */
1971 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1972
1973 /* Lookup the equivalence class for the bitmap of LABELS in TABLE,
1974 inserting a new entry with a zero equivalence class if none exists.
1975 Returns the existing or newly added equiv_class_label. */
1976
1977 static equiv_class_label *
1978 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1979 bitmap labels)
1980 {
1981 equiv_class_label **slot;
1982 equiv_class_label ecl;
1983
1984 ecl.labels = labels;
1985 ecl.hashcode = bitmap_hash (labels);
1986 slot = table->find_slot (&ecl, INSERT);
1987 if (!*slot)
1988 {
1989 *slot = XNEW (struct equiv_class_label);
1990 (*slot)->labels = labels;
1991 (*slot)->hashcode = ecl.hashcode;
1992 (*slot)->equivalence_class = 0;
1993 }
1994
1995 return *slot;
1996 }
1997
1998 /* Perform offline variable substitution.
1999
2000 This is a worst case quadratic time way of identifying variables
2001 that must have equivalent points-to sets, including those caused by
2002 static cycles, and single entry subgraphs, in the constraint graph.
2003
2004 The technique is described in "Exploiting Pointer and Location
2005 Equivalence to Optimize Pointer Analysis" (14th International
2006 Static Analysis Symposium (SAS), August 2007). It is known as the
2007 "HU" algorithm, and is equivalent to value numbering the collapsed
2008 constraint graph including evaluating unions.
2009
2010 The general method of finding equivalence classes is as follows:
2011 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2012 Initialize all non-REF nodes to be direct nodes.
2013 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
2014 variable}.
2015 For each constraint containing a dereference, we do the same
2016 thing.
2017
2018 We then compute SCC's in the graph and unify nodes in the same SCC,
2019 including pts sets.
2020
2021 For each non-collapsed node x:
2022 Visit all unvisited explicit incoming edges.
2023 Ignoring all non-pointers, set pts(x) = Union of pts(y) for each y
2024 where y->x.
2025 Lookup the equivalence class for pts(x).
2026 If we found one, equivalence_class(x) = found class.
2027 Otherwise, equivalence_class(x) = new class, and new_class is
2028 added to the lookup table.
2029
2030 All direct nodes with the same equivalence class can be replaced
2031 with a single representative node.
2032 All unlabeled nodes (label == 0) are not pointers and all edges
2033 involving them can be eliminated.
2034 We perform these optimizations during rewrite_constraints.
2035
2036 In addition to pointer equivalence class finding, we also perform
2037 location equivalence class finding. This is the set of variables
2038 that always appear together in points-to sets. We use this to
2039 compress the size of the points-to sets. */
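/* A small worked example (a sketch, not a full trace): given

     a = &x;  b = a;  c = b;

   the nodes a, b and c all end up with the same label bitmap {x},
   so they fall into one pointer equivalence class and are collapsed
   to a single representative before the main solver runs.  */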
2040
2041 /* Current maximum pointer equivalence class id. */
2042 static int pointer_equiv_class;
2043
2044 /* Current maximum location equivalence class id. */
2045 static int location_equiv_class;
2046
2047 /* Recursive routine to find strongly connected components in GRAPH,
2048 and label its nodes with DFS numbers. */
2049
2050 static void
2051 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2052 {
2053 unsigned int i;
2054 bitmap_iterator bi;
2055 unsigned int my_dfs;
2056
2057 gcc_checking_assert (si->node_mapping[n] == n);
2058 bitmap_set_bit (si->visited, n);
2059 si->dfs[n] = si->current_index ++;
2060 my_dfs = si->dfs[n];
2061
2062 /* Visit all the predecessors. */
2063 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2064 {
2065 unsigned int w = si->node_mapping[i];
2066
2067 if (bitmap_bit_p (si->deleted, w))
2068 continue;
2069
2070 if (!bitmap_bit_p (si->visited, w))
2071 condense_visit (graph, si, w);
2072
2073 unsigned int t = si->node_mapping[w];
2074 gcc_checking_assert (si->node_mapping[n] == n);
2075 if (si->dfs[t] < si->dfs[n])
2076 si->dfs[n] = si->dfs[t];
2077 }
2078
2079 /* Visit all the implicit predecessors. */
2080 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2081 {
2082 unsigned int w = si->node_mapping[i];
2083
2084 if (bitmap_bit_p (si->deleted, w))
2085 continue;
2086
2087 if (!bitmap_bit_p (si->visited, w))
2088 condense_visit (graph, si, w);
2089
2090 unsigned int t = si->node_mapping[w];
2091 gcc_assert (si->node_mapping[n] == n);
2092 if (si->dfs[t] < si->dfs[n])
2093 si->dfs[n] = si->dfs[t];
2094 }
2095
2096 /* See if any components have been identified. */
2097 if (si->dfs[n] == my_dfs)
2098 {
2099 while (si->scc_stack.length () != 0
2100 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2101 {
2102 unsigned int w = si->scc_stack.pop ();
2103 si->node_mapping[w] = n;
2104
2105 if (!bitmap_bit_p (graph->direct_nodes, w))
2106 bitmap_clear_bit (graph->direct_nodes, n);
2107
2108 /* Unify our nodes. */
2109 if (graph->preds[w])
2110 {
2111 if (!graph->preds[n])
2112 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2113 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2114 }
2115 if (graph->implicit_preds[w])
2116 {
2117 if (!graph->implicit_preds[n])
2118 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2119 bitmap_ior_into (graph->implicit_preds[n],
2120 graph->implicit_preds[w]);
2121 }
2122 if (graph->points_to[w])
2123 {
2124 if (!graph->points_to[n])
2125 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2126 bitmap_ior_into (graph->points_to[n],
2127 graph->points_to[w]);
2128 }
2129 }
2130 bitmap_set_bit (si->deleted, n);
2131 }
2132 else
2133 si->scc_stack.safe_push (n);
2134 }
2135
2136 /* Label pointer equivalences.
2137
2138 This performs a value numbering of the constraint graph to
2139 discover which variables will always have the same points-to sets
2140 under the current set of constraints.
2141
2142 The way it value numbers is to store the set of points-to bits
2143 generated by the constraints and graph edges. This is just used as a
2144 hash and equality comparison. The *actual set of points-to bits* is
2145 completely irrelevant, in that we don't care about being able to
2146 extract them later.
2147
2148 The equality values (currently bitmaps) just have to satisfy a few
2149 constraints, the main ones being:
2150 1. The combining operation must be order independent.
2151 2. The end result of a given set of operations must be unique iff the
2152 combination of input values is unique.
2153 3. Hashable. */
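/* Bitmaps over variable ids meet all three requirements: bitmap
   union is commutative and associative (1), each relevant constraint
   contributes a fresh bit so distinct input combinations yield
   distinct unions (2), and bitmap_hash provides the hash (3).  */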
2154
2155 static void
2156 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2157 {
2158 unsigned int i, first_pred;
2159 bitmap_iterator bi;
2160
2161 bitmap_set_bit (si->visited, n);
2162
2163 /* Label and union our incoming edges' points-to sets. */
2164 first_pred = -1U;
2165 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2166 {
2167 unsigned int w = si->node_mapping[i];
2168 if (!bitmap_bit_p (si->visited, w))
2169 label_visit (graph, si, w);
2170
2171 /* Skip unused edges. */
2172 if (w == n || graph->pointer_label[w] == 0)
2173 continue;
2174
2175 if (graph->points_to[w])
2176 {
2177 if (!graph->points_to[n])
2178 {
2179 if (first_pred == -1U)
2180 first_pred = w;
2181 else
2182 {
2183 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2184 bitmap_ior (graph->points_to[n],
2185 graph->points_to[first_pred],
2186 graph->points_to[w]);
2187 }
2188 }
2189 else
2190 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2191 }
2192 }
2193
2194 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2195 if (!bitmap_bit_p (graph->direct_nodes, n))
2196 {
2197 if (!graph->points_to[n])
2198 {
2199 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2200 if (first_pred != -1U)
2201 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2202 }
2203 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2204 graph->pointer_label[n] = pointer_equiv_class++;
2205 equiv_class_label_t ecl;
2206 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2207 graph->points_to[n]);
2208 ecl->equivalence_class = graph->pointer_label[n];
2209 return;
2210 }
2211
2212 /* If there was only a single non-empty predecessor the pointer equiv
2213 class is the same. */
2214 if (!graph->points_to[n])
2215 {
2216 if (first_pred != -1U)
2217 {
2218 graph->pointer_label[n] = graph->pointer_label[first_pred];
2219 graph->points_to[n] = graph->points_to[first_pred];
2220 }
2221 return;
2222 }
2223
2224 if (!bitmap_empty_p (graph->points_to[n]))
2225 {
2226 equiv_class_label_t ecl;
2227 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2228 graph->points_to[n]);
2229 if (ecl->equivalence_class == 0)
2230 ecl->equivalence_class = pointer_equiv_class++;
2231 else
2232 {
2233 BITMAP_FREE (graph->points_to[n]);
2234 graph->points_to[n] = ecl->labels;
2235 }
2236 graph->pointer_label[n] = ecl->equivalence_class;
2237 }
2238 }
2239
2240 /* Print the pred graph in dot format. */
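/* For a node b with a predecessor edge from a, where a has the
   points-to bits {5}, the output looks roughly like (a sketch,
   modulo exact whitespace):

     strict digraph {
       node [ shape = box ]
       edge [ fontsize = "12" ]
       // List of nodes and complex constraints in the constraint graph:
       "a" [label="a = { 5 }"];
       "b";
       // Edges in the constraint graph:
       "a" -> "b";
     }  */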
2241
2242 static void
2243 dump_pred_graph (struct scc_info *si, FILE *file)
2244 {
2245 unsigned int i;
2246
2247 /* Only print the graph if it has already been initialized: */
2248 if (!graph)
2249 return;
2250
2251 /* Prints the header of the dot file: */
2252 fprintf (file, "strict digraph {\n");
2253 fprintf (file, " node [\n shape = box\n ]\n");
2254 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2255 fprintf (file, "\n // List of nodes and complex constraints in "
2256 "the constraint graph:\n");
2257
2258 /* The next lines print the nodes in the graph together with the
2259 complex constraints attached to them. */
2260 for (i = 1; i < graph->size; i++)
2261 {
2262 if (i == FIRST_REF_NODE)
2263 continue;
2264 if (si->node_mapping[i] != i)
2265 continue;
2266 if (i < FIRST_REF_NODE)
2267 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2268 else
2269 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2270 if (graph->points_to[i]
2271 && !bitmap_empty_p (graph->points_to[i]))
2272 {
2273 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2274 unsigned j;
2275 bitmap_iterator bi;
2276 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2277 fprintf (file, " %d", j);
2278 fprintf (file, " }\"]");
2279 }
2280 fprintf (file, ";\n");
2281 }
2282
2283 /* Go over the edges. */
2284 fprintf (file, "\n // Edges in the constraint graph:\n");
2285 for (i = 1; i < graph->size; i++)
2286 {
2287 unsigned j;
2288 bitmap_iterator bi;
2289 if (si->node_mapping[i] != i)
2290 continue;
2291 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2292 {
2293 unsigned from = si->node_mapping[j];
2294 if (from < FIRST_REF_NODE)
2295 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2296 else
2297 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2298 fprintf (file, " -> ");
2299 if (i < FIRST_REF_NODE)
2300 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2301 else
2302 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2303 fprintf (file, ";\n");
2304 }
2305 }
2306
2307 /* Prints the tail of the dot file. */
2308 fprintf (file, "}\n");
2309 }
2310
2311 /* Perform offline variable substitution, discovering equivalence
2312 classes, and eliminating non-pointer variables. */
2313
2314 static struct scc_info *
2315 perform_var_substitution (constraint_graph_t graph)
2316 {
2317 unsigned int i;
2318 unsigned int size = graph->size;
2319 struct scc_info *si = init_scc_info (size);
2320
2321 bitmap_obstack_initialize (&iteration_obstack);
2322 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2323 location_equiv_class_table
2324 = new hash_table<equiv_class_hasher> (511);
2325 pointer_equiv_class = 1;
2326 location_equiv_class = 1;
2327
2328 /* Condense the nodes, which means to find SCC's, count incoming
2329 predecessors, and unite nodes in SCC's. */
2330 for (i = 1; i < FIRST_REF_NODE; i++)
2331 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2332 condense_visit (graph, si, si->node_mapping[i]);
2333
2334 if (dump_file && (dump_flags & TDF_GRAPH))
2335 {
2336 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2337 "in dot format:\n");
2338 dump_pred_graph (si, dump_file);
2339 fprintf (dump_file, "\n\n");
2340 }
2341
2342 bitmap_clear (si->visited);
2343 /* Actually label the nodes for pointer equivalences. */
2344 for (i = 1; i < FIRST_REF_NODE; i++)
2345 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2346 label_visit (graph, si, si->node_mapping[i]);
2347
2348 /* Calculate location equivalence labels. */
2349 for (i = 1; i < FIRST_REF_NODE; i++)
2350 {
2351 bitmap pointed_by;
2352 bitmap_iterator bi;
2353 unsigned int j;
2354
2355 if (!graph->pointed_by[i])
2356 continue;
2357 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2358
2359 /* Translate the pointed-by mapping for pointer equivalence
2360 labels. */
2361 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2362 {
2363 bitmap_set_bit (pointed_by,
2364 graph->pointer_label[si->node_mapping[j]]);
2365 }
2366 /* The original pointed_by is now dead. */
2367 BITMAP_FREE (graph->pointed_by[i]);
2368
2369 /* Look up the location equivalence label if one exists, or make
2370 one otherwise. */
2371 equiv_class_label_t ecl;
2372 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2373 if (ecl->equivalence_class == 0)
2374 ecl->equivalence_class = location_equiv_class++;
2375 else
2376 {
2377 if (dump_file && (dump_flags & TDF_DETAILS))
2378 fprintf (dump_file, "Found location equivalence for node %s\n",
2379 get_varinfo (i)->name);
2380 BITMAP_FREE (pointed_by);
2381 }
2382 graph->loc_label[i] = ecl->equivalence_class;
2383
2384 }
2385
2386 if (dump_file && (dump_flags & TDF_DETAILS))
2387 for (i = 1; i < FIRST_REF_NODE; i++)
2388 {
2389 unsigned j = si->node_mapping[i];
2390 if (j != i)
2391 {
2392 fprintf (dump_file, "%s node id %d ",
2393 bitmap_bit_p (graph->direct_nodes, i)
2394 ? "Direct" : "Indirect", i);
2395 if (i < FIRST_REF_NODE)
2396 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2397 else
2398 fprintf (dump_file, "\"*%s\"",
2399 get_varinfo (i - FIRST_REF_NODE)->name);
2400 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2401 if (j < FIRST_REF_NODE)
2402 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2403 else
2404 fprintf (dump_file, "\"*%s\"\n",
2405 get_varinfo (j - FIRST_REF_NODE)->name);
2406 }
2407 else
2408 {
2409 fprintf (dump_file,
2410 "Equivalence classes for %s node id %d ",
2411 bitmap_bit_p (graph->direct_nodes, i)
2412 ? "direct" : "indirect", i);
2413 if (i < FIRST_REF_NODE)
2414 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2415 else
2416 fprintf (dump_file, "\"*%s\"",
2417 get_varinfo (i - FIRST_REF_NODE)->name);
2418 fprintf (dump_file,
2419 ": pointer %d, location %d\n",
2420 graph->pointer_label[i], graph->loc_label[i]);
2421 }
2422 }
2423
2424 /* Quickly eliminate our non-pointer variables. */
2425
2426 for (i = 1; i < FIRST_REF_NODE; i++)
2427 {
2428 unsigned int node = si->node_mapping[i];
2429
2430 if (graph->pointer_label[node] == 0)
2431 {
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2433 fprintf (dump_file,
2434 "%s is a non-pointer variable, eliminating edges.\n",
2435 get_varinfo (node)->name);
2436 stats.nonpointer_vars++;
2437 clear_edges_for_node (graph, node);
2438 }
2439 }
2440
2441 return si;
2442 }
2443
2444 /* Free information that was only necessary for variable
2445 substitution. */
2446
2447 static void
2448 free_var_substitution_info (struct scc_info *si)
2449 {
2450 free_scc_info (si);
2451 free (graph->pointer_label);
2452 free (graph->loc_label);
2453 free (graph->pointed_by);
2454 free (graph->points_to);
2455 free (graph->eq_rep);
2456 sbitmap_free (graph->direct_nodes);
2457 delete pointer_equiv_class_table;
2458 pointer_equiv_class_table = NULL;
2459 delete location_equiv_class_table;
2460 location_equiv_class_table = NULL;
2461 bitmap_obstack_release (&iteration_obstack);
2462 }
2463
2464 /* Return an existing node that is equivalent to NODE, which has
2465 equivalence class LABEL, if one exists. Return NODE otherwise. */
2466
2467 static unsigned int
2468 find_equivalent_node (constraint_graph_t graph,
2469 unsigned int node, unsigned int label)
2470 {
2471 /* If the address version of this variable is unused, we can
2472 substitute it for anything else with the same label.
2473 Otherwise, we know the pointers are equivalent, but not the
2474 locations, and we can unite them later. */
2475
2476 if (!bitmap_bit_p (graph->address_taken, node))
2477 {
2478 gcc_checking_assert (label < graph->size);
2479
2480 if (graph->eq_rep[label] != -1)
2481 {
2482 /* Unify the two variables since we know they are equivalent. */
2483 if (unite (graph->eq_rep[label], node))
2484 unify_nodes (graph, graph->eq_rep[label], node, false);
2485 return graph->eq_rep[label];
2486 }
2487 else
2488 {
2489 graph->eq_rep[label] = node;
2490 graph->pe_rep[label] = node;
2491 }
2492 }
2493 else
2494 {
2495 gcc_checking_assert (label < graph->size);
2496 graph->pe[node] = label;
2497 if (graph->pe_rep[label] == -1)
2498 graph->pe_rep[label] = node;
2499 }
2500
2501 return node;
2502 }
2503
2504 /* Unite pointer equivalent but not location equivalent nodes in
2505 GRAPH. This may only be performed once variable substitution is
2506 finished. */
2507
2508 static void
2509 unite_pointer_equivalences (constraint_graph_t graph)
2510 {
2511 unsigned int i;
2512
2513 /* Go through the pointer equivalences and unite them to their
2514 representative, if they aren't already. */
2515 for (i = 1; i < FIRST_REF_NODE; i++)
2516 {
2517 unsigned int label = graph->pe[i];
2518 if (label)
2519 {
2520 int label_rep = graph->pe_rep[label];
2521
2522 if (label_rep == -1)
2523 continue;
2524
2525 label_rep = find (label_rep);
2526 if (label_rep >= 0 && unite (label_rep, find (i)))
2527 unify_nodes (graph, label_rep, i, false);
2528 }
2529 }
2530 }
2531
2532 /* Move complex constraints to the GRAPH nodes they belong to. */
2533
2534 static void
2535 move_complex_constraints (constraint_graph_t graph)
2536 {
2537 int i;
2538 constraint_t c;
2539
2540 FOR_EACH_VEC_ELT (constraints, i, c)
2541 {
2542 if (c)
2543 {
2544 struct constraint_expr lhs = c->lhs;
2545 struct constraint_expr rhs = c->rhs;
2546
2547 if (lhs.type == DEREF)
2548 {
2549 insert_into_complex (graph, lhs.var, c);
2550 }
2551 else if (rhs.type == DEREF)
2552 {
2553 if (!(get_varinfo (lhs.var)->is_special_var))
2554 insert_into_complex (graph, rhs.var, c);
2555 }
2556 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2557 && (lhs.offset != 0 || rhs.offset != 0))
2558 {
2559 insert_into_complex (graph, rhs.var, c);
2560 }
2561 }
2562 }
2563 }
2564
2565
2566 /* Optimize and rewrite complex constraints while performing
2567 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2568 result of perform_var_substitution. */
2569
2570 static void
2571 rewrite_constraints (constraint_graph_t graph,
2572 struct scc_info *si)
2573 {
2574 int i;
2575 constraint_t c;
2576
2577 #ifdef ENABLE_CHECKING
2578 for (unsigned int j = 0; j < graph->size; j++)
2579 gcc_assert (find (j) == j);
2580 #endif
2581
2582 FOR_EACH_VEC_ELT (constraints, i, c)
2583 {
2584 struct constraint_expr lhs = c->lhs;
2585 struct constraint_expr rhs = c->rhs;
2586 unsigned int lhsvar = find (lhs.var);
2587 unsigned int rhsvar = find (rhs.var);
2588 unsigned int lhsnode, rhsnode;
2589 unsigned int lhslabel, rhslabel;
2590
2591 lhsnode = si->node_mapping[lhsvar];
2592 rhsnode = si->node_mapping[rhsvar];
2593 lhslabel = graph->pointer_label[lhsnode];
2594 rhslabel = graph->pointer_label[rhsnode];
2595
2596 /* See if it is really a non-pointer variable, and if so, ignore
2597 the constraint. */
2598 if (lhslabel == 0)
2599 {
2600 if (dump_file && (dump_flags & TDF_DETAILS))
2601 {
2602
2603 fprintf (dump_file, "%s is a non-pointer variable, "
2604 "ignoring constraint:",
2605 get_varinfo (lhs.var)->name);
2606 dump_constraint (dump_file, c);
2607 fprintf (dump_file, "\n");
2608 }
2609 constraints[i] = NULL;
2610 continue;
2611 }
2612
2613 if (rhslabel == 0)
2614 {
2615 if (dump_file && (dump_flags & TDF_DETAILS))
2616 {
2617
2618 fprintf (dump_file, "%s is a non-pointer variable, "
2619 "ignoring constraint:",
2620 get_varinfo (rhs.var)->name);
2621 dump_constraint (dump_file, c);
2622 fprintf (dump_file, "\n");
2623 }
2624 constraints[i] = NULL;
2625 continue;
2626 }
2627
2628 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2629 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2630 c->lhs.var = lhsvar;
2631 c->rhs.var = rhsvar;
2632 }
2633 }
2634
2635 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2636 part of an SCC, false otherwise. */
2637
2638 static bool
2639 eliminate_indirect_cycles (unsigned int node)
2640 {
2641 if (graph->indirect_cycles[node] != -1
2642 && !bitmap_empty_p (get_varinfo (node)->solution))
2643 {
2644 unsigned int i;
2645 auto_vec<unsigned> queue;
2646 int queuepos;
2647 unsigned int to = find (graph->indirect_cycles[node]);
2648 bitmap_iterator bi;
2649
2650 /* We can't touch the solution set and call unify_nodes
2651 at the same time, because unify_nodes is going to do
2652 bitmap unions into it. */
2653
2654 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2655 {
2656 if (find (i) == i && i != to)
2657 {
2658 if (unite (to, i))
2659 queue.safe_push (i);
2660 }
2661 }
2662
2663 for (queuepos = 0;
2664 queue.iterate (queuepos, &i);
2665 queuepos++)
2666 {
2667 unify_nodes (graph, to, i, true);
2668 }
2669 return true;
2670 }
2671 return false;
2672 }
2673
2674 /* Solve the constraint graph GRAPH using our worklist solver.
2675 This is based on the PW* family of solvers from the "Efficient Field
2676 Sensitive Pointer Analysis for C" paper.
2677 It works by iterating over all the graph nodes, processing the complex
2678 constraints and propagating the copy constraints, until everything stops
2679 changing. This corresponds to steps 6-8 in the solving list given above. */
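/* In outline (a sketch of the function below):

     while (changed is not empty)
       compute a topological order of the current graph;
       for each representative node i in that order:
         delta = bits added to Sol(i) since the last visit;
         run i's complex constraints on delta;
         propagate delta along i's copy edges, re-marking
         successors whose solution grew;  */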
2680
2681 static void
2682 solve_graph (constraint_graph_t graph)
2683 {
2684 unsigned int size = graph->size;
2685 unsigned int i;
2686 bitmap pts;
2687
2688 changed = BITMAP_ALLOC (NULL);
2689
2690 /* Mark all initial non-collapsed nodes as changed. */
2691 for (i = 1; i < size; i++)
2692 {
2693 varinfo_t ivi = get_varinfo (i);
2694 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2695 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2696 || graph->complex[i].length () > 0))
2697 bitmap_set_bit (changed, i);
2698 }
2699
2700 /* Allocate a bitmap to be used to store the changed bits. */
2701 pts = BITMAP_ALLOC (&pta_obstack);
2702
2703 while (!bitmap_empty_p (changed))
2704 {
2705 unsigned int i;
2706 struct topo_info *ti = init_topo_info ();
2707 stats.iterations++;
2708
2709 bitmap_obstack_initialize (&iteration_obstack);
2710
2711 compute_topo_order (graph, ti);
2712
2713 while (ti->topo_order.length () != 0)
2714 {
2715
2716 i = ti->topo_order.pop ();
2717
2718 /* If this variable is not a representative, skip it. */
2719 if (find (i) != i)
2720 continue;
2721
2722 /* In certain indirect cycle cases, we may merge this
2723 variable to another. */
2724 if (eliminate_indirect_cycles (i) && find (i) != i)
2725 continue;
2726
2727 /* If the node has changed, we need to process the
2728 complex constraints and outgoing edges again. */
2729 if (bitmap_clear_bit (changed, i))
2730 {
2731 unsigned int j;
2732 constraint_t c;
2733 bitmap solution;
2734 vec<constraint_t> complex = graph->complex[i];
2735 varinfo_t vi = get_varinfo (i);
2736 bool solution_empty;
2737
2738 /* Compute the changed set of solution bits. If anything
2739 is in the solution just propagate that. */
2740 if (bitmap_bit_p (vi->solution, anything_id))
2741 {
2742 /* If anything is also in the old solution there is
2743 nothing to do.
2744 ??? But we shouldn't have ended up with "changed" set ... */
2745 if (vi->oldsolution
2746 && bitmap_bit_p (vi->oldsolution, anything_id))
2747 continue;
2748 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2749 }
2750 else if (vi->oldsolution)
2751 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2752 else
2753 bitmap_copy (pts, vi->solution);
2754
2755 if (bitmap_empty_p (pts))
2756 continue;
2757
2758 if (vi->oldsolution)
2759 bitmap_ior_into (vi->oldsolution, pts);
2760 else
2761 {
2762 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2763 bitmap_copy (vi->oldsolution, pts);
2764 }
2765
2766 solution = vi->solution;
2767 solution_empty = bitmap_empty_p (solution);
2768
2769 /* Process the complex constraints */
2770 bitmap expanded_pts = NULL;
2771 FOR_EACH_VEC_ELT (complex, j, c)
2772 {
2773 /* XXX: This is going to unsort the constraints in
2774 some cases, which will occasionally add duplicate
2775 constraints during unification. This does not
2776 affect correctness. */
2777 c->lhs.var = find (c->lhs.var);
2778 c->rhs.var = find (c->rhs.var);
2779
2780 /* The only complex constraint that can change our
2781 solution to non-empty, given an empty solution,
2782 is a constraint where the lhs side is receiving
2783 some set from elsewhere. */
2784 if (!solution_empty || c->lhs.type != DEREF)
2785 do_complex_constraint (graph, c, pts, &expanded_pts);
2786 }
2787 BITMAP_FREE (expanded_pts);
2788
2789 solution_empty = bitmap_empty_p (solution);
2790
2791 if (!solution_empty)
2792 {
2793 bitmap_iterator bi;
2794 unsigned eff_escaped_id = find (escaped_id);
2795
2796 /* Propagate solution to all successors. */
2797 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2798 0, j, bi)
2799 {
2800 bitmap tmp;
2801 bool flag;
2802
2803 unsigned int to = find (j);
2804 tmp = get_varinfo (to)->solution;
2805 flag = false;
2806
2807 /* Don't try to propagate to ourselves. */
2808 if (to == i)
2809 continue;
2810
2811 /* If we propagate from ESCAPED use ESCAPED as
2812 placeholder. */
2813 if (i == eff_escaped_id)
2814 flag = bitmap_set_bit (tmp, escaped_id);
2815 else
2816 flag = bitmap_ior_into (tmp, pts);
2817
2818 if (flag)
2819 bitmap_set_bit (changed, to);
2820 }
2821 }
2822 }
2823 }
2824 free_topo_info (ti);
2825 bitmap_obstack_release (&iteration_obstack);
2826 }
2827
2828 BITMAP_FREE (pts);
2829 BITMAP_FREE (changed);
2830 bitmap_obstack_release (&oldpta_obstack);
2831 }
2832
2833 /* Map from trees to variable infos. */
2834 static hash_map<tree, varinfo_t> *vi_for_tree;
2835
2836
2837 /* Insert VI as the variable info for tree T in the vi_for_tree map. */
2838
2839 static void
2840 insert_vi_for_tree (tree t, varinfo_t vi)
2841 {
2842 gcc_assert (vi);
2843 gcc_assert (!vi_for_tree->put (t, vi));
2844 }
2845
2846 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2847 exist in the map, return NULL; otherwise return the varinfo we found. */
2848
2849 static varinfo_t
2850 lookup_vi_for_tree (tree t)
2851 {
2852 varinfo_t *slot = vi_for_tree->get (t);
2853 if (slot == NULL)
2854 return NULL;
2855
2856 return *slot;
2857 }
2858
2859 /* Return a printable name for DECL. */
2860
2861 static const char *
2862 alias_get_name (tree decl)
2863 {
2864 const char *res = NULL;
2865 char *temp;
2866 int num_printed = 0;
2867
2868 if (!dump_file)
2869 return "NULL";
2870
2871 if (TREE_CODE (decl) == SSA_NAME)
2872 {
2873 res = get_name (decl);
2874 if (res)
2875 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2876 else
2877 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2878 if (num_printed > 0)
2879 {
2880 res = ggc_strdup (temp);
2881 free (temp);
2882 }
2883 }
2884 else if (DECL_P (decl))
2885 {
2886 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2887 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2888 else
2889 {
2890 res = get_name (decl);
2891 if (!res)
2892 {
2893 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2894 if (num_printed > 0)
2895 {
2896 res = ggc_strdup (temp);
2897 free (temp);
2898 }
2899 }
2900 }
2901 }
2902 if (res != NULL)
2903 return res;
2904
2905 return "NULL";
2906 }
2907
2908 /* Find the variable id for tree T in the map.
2909 If T doesn't exist in the map, create an entry for it and return it. */
2910
2911 static varinfo_t
2912 get_vi_for_tree (tree t)
2913 {
2914 varinfo_t *slot = vi_for_tree->get (t);
2915 if (slot == NULL)
2916 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2917
2918 return *slot;
2919 }
2920
2921 /* Get a scalar constraint expression for a new temporary variable. */
2922
2923 static struct constraint_expr
2924 new_scalar_tmp_constraint_exp (const char *name)
2925 {
2926 struct constraint_expr tmp;
2927 varinfo_t vi;
2928
2929 vi = new_var_info (NULL_TREE, name);
2930 vi->offset = 0;
2931 vi->size = -1;
2932 vi->fullsize = -1;
2933 vi->is_full_var = 1;
2934
2935 tmp.var = vi->id;
2936 tmp.type = SCALAR;
2937 tmp.offset = 0;
2938
2939 return tmp;
2940 }
2941
2942 /* Get a constraint expression vector from an SSA_VAR_P node.
2943 If address_p is true, the result will have its address taken. */
2944
2945 static void
2946 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2947 {
2948 struct constraint_expr cexpr;
2949 varinfo_t vi;
2950
2951 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2952 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2953
2954 /* For parameters, get at the points-to set for the actual parm
2955 decl. */
2956 if (TREE_CODE (t) == SSA_NAME
2957 && SSA_NAME_IS_DEFAULT_DEF (t)
2958 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2959 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2960 {
2961 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2962 return;
2963 }
2964
2965 /* For global variables resort to the alias target. */
2966 if (TREE_CODE (t) == VAR_DECL
2967 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2968 {
2969 varpool_node *node = varpool_node::get (t);
2970 if (node && node->alias && node->analyzed)
2971 {
2972 node = node->ultimate_alias_target ();
2973 t = node->decl;
2974 }
2975 }
2976
2977 vi = get_vi_for_tree (t);
2978 cexpr.var = vi->id;
2979 cexpr.type = SCALAR;
2980 cexpr.offset = 0;
2981
2982 /* If we are not taking the address of the constraint expr, add all
2983 sub-fields of the variable as well. */
2984 if (!address_p
2985 && !vi->is_full_var)
2986 {
2987 for (; vi; vi = vi_next (vi))
2988 {
2989 cexpr.var = vi->id;
2990 results->safe_push (cexpr);
2991 }
2992 return;
2993 }
2994
2995 results->safe_push (cexpr);
2996 }
2997
2998 /* Process constraint T, performing various simplifications and then
2999 adding it to our list of overall constraints. */
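/* For example, a gimple-level copy such as n->a = *p arrives here as
   *lhs = *rhs, which has no direct constraint form; the DEREF/DEREF
   case below splits it into

     doubledereftmp = *rhs;
     *lhs = doubledereftmp;

   using a fresh scalar temporary.  */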
3000
3001 static void
3002 process_constraint (constraint_t t)
3003 {
3004 struct constraint_expr rhs = t->rhs;
3005 struct constraint_expr lhs = t->lhs;
3006
3007 gcc_assert (rhs.var < varmap.length ());
3008 gcc_assert (lhs.var < varmap.length ());
3009
3010 /* If we didn't get any useful constraint from the lhs we get
3011 &ANYTHING as fallback from get_constraint_for. Deal with
3012 it here by turning it into *ANYTHING. */
3013 if (lhs.type == ADDRESSOF
3014 && lhs.var == anything_id)
3015 lhs.type = DEREF;
3016
3017 /* ADDRESSOF on the lhs is invalid. */
3018 gcc_assert (lhs.type != ADDRESSOF);
3019
3020 /* We shouldn't add constraints from things that cannot have pointers.
3021 It's not completely trivial to avoid in the callers, so do it here. */
3022 if (rhs.type != ADDRESSOF
3023 && !get_varinfo (rhs.var)->may_have_pointers)
3024 return;
3025
3026 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3027 if (!get_varinfo (lhs.var)->may_have_pointers)
3028 return;
3029
3030 /* This can happen in our IR with things like n->a = *p */
3031 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3032 {
3033 /* Split into tmp = *rhs, *lhs = tmp */
3034 struct constraint_expr tmplhs;
3035 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
3036 process_constraint (new_constraint (tmplhs, rhs));
3037 process_constraint (new_constraint (lhs, tmplhs));
3038 }
3039 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
3040 {
3041 /* Split into tmp = &rhs, *lhs = tmp */
3042 struct constraint_expr tmplhs;
3043 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
3044 process_constraint (new_constraint (tmplhs, rhs));
3045 process_constraint (new_constraint (lhs, tmplhs));
3046 }
3047 else
3048 {
3049 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3050 constraints.safe_push (t);
3051 }
3052 }
3053
3054
3055 /* Return the position, in bits, of FDECL from the beginning of its
3056 structure. */
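/* For example, a field with a DECL_FIELD_OFFSET of 4 bytes and a
   DECL_FIELD_BIT_OFFSET of 16 sits at 4 * BITS_PER_UNIT + 16 = 48
   bits on a target with 8-bit units (numbers illustrative only).  */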
3057
3058 static HOST_WIDE_INT
3059 bitpos_of_field (const tree fdecl)
3060 {
3061 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3062 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3063 return -1;
3064
3065 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3066 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3067 }
3068
3069
3070 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3071 resulting constraint expressions in *RESULTS. */
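/* For instance, for q = p + 4 under field-sensitive analysis a
   SCALAR entry for p simply has its offset bumped by
   4 * BITS_PER_UNIT, while an ADDRESSOF entry &s gets expanded to
   the fields of s that overlap the shifted access (a sketch; the
   precise expansion is the loop below).  */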
3072
3073 static void
3074 get_constraint_for_ptr_offset (tree ptr, tree offset,
3075 vec<ce_s> *results)
3076 {
3077 struct constraint_expr c;
3078 unsigned int j, n;
3079 HOST_WIDE_INT rhsoffset;
3080
3081 /* If we do not do field-sensitive PTA adding offsets to pointers
3082 does not change the points-to solution. */
3083 if (!use_field_sensitive)
3084 {
3085 get_constraint_for_rhs (ptr, results);
3086 return;
3087 }
3088
3089 /* If the offset is not an integer constant that fits in a
3090 HOST_WIDE_INT, we have to fall back to a conservative
3091 solution which includes all sub-fields of all pointed-to
3092 variables of ptr. */
3093 if (offset == NULL_TREE
3094 || TREE_CODE (offset) != INTEGER_CST)
3095 rhsoffset = UNKNOWN_OFFSET;
3096 else
3097 {
3098 /* Sign-extend the offset. */
3099 offset_int soffset = offset_int::from (offset, SIGNED);
3100 if (!wi::fits_shwi_p (soffset))
3101 rhsoffset = UNKNOWN_OFFSET;
3102 else
3103 {
3104 /* Make sure the bit-offset also fits. */
3105 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3106 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3107 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3108 rhsoffset = UNKNOWN_OFFSET;
3109 }
3110 }
3111
3112 get_constraint_for_rhs (ptr, results);
3113 if (rhsoffset == 0)
3114 return;
3115
3116 /* As we are eventually appending to the solution do not use
3117 vec::iterate here. */
3118 n = results->length ();
3119 for (j = 0; j < n; j++)
3120 {
3121 varinfo_t curr;
3122 c = (*results)[j];
3123 curr = get_varinfo (c.var);
3124
3125 if (c.type == ADDRESSOF
3126 /* If this varinfo represents a full variable just use it. */
3127 && curr->is_full_var)
3128 ;
3129 else if (c.type == ADDRESSOF
3130 /* If we do not know the offset add all subfields. */
3131 && rhsoffset == UNKNOWN_OFFSET)
3132 {
3133 varinfo_t temp = get_varinfo (curr->head);
3134 do
3135 {
3136 struct constraint_expr c2;
3137 c2.var = temp->id;
3138 c2.type = ADDRESSOF;
3139 c2.offset = 0;
3140 if (c2.var != c.var)
3141 results->safe_push (c2);
3142 temp = vi_next (temp);
3143 }
3144 while (temp);
3145 }
3146 else if (c.type == ADDRESSOF)
3147 {
3148 varinfo_t temp;
3149 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3150
3151 /* If curr->offset + rhsoffset is less than zero adjust it. */
3152 if (rhsoffset < 0
3153 && curr->offset < offset)
3154 offset = 0;
3155
3156 /* We have to include all fields that overlap the current
3157 field shifted by rhsoffset. We also include at least
3158 the last or the first field of the variable so that
3159 reachability of off-bound addresses, in particular &object + 1,
3160 is modeled conservatively. */
3161 temp = first_or_preceding_vi_for_offset (curr, offset);
3162 c.var = temp->id;
3163 c.offset = 0;
3164 temp = vi_next (temp);
3165 while (temp
3166 && temp->offset < offset + curr->size)
3167 {
3168 struct constraint_expr c2;
3169 c2.var = temp->id;
3170 c2.type = ADDRESSOF;
3171 c2.offset = 0;
3172 results->safe_push (c2);
3173 temp = vi_next (temp);
3174 }
3175 }
3176 else if (c.type == SCALAR)
3177 {
3178 gcc_assert (c.offset == 0);
3179 c.offset = rhsoffset;
3180 }
3181 else
3182 /* We shouldn't get any DEREFs here. */
3183 gcc_unreachable ();
3184
3185 (*results)[j] = c;
3186 }
3187 }
3188
3189
3190 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3191 If address_p is true the result will have its address taken.
3192 If lhs_p is true then the constraint expression is assumed to be used
3193 as the lhs. */
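/* For example, for an access s.b with s of type
   struct S { int *a; int *b; }, the base s is resolved first, and
   the bitpos/bitmaxsize from get_ref_base_and_extent then select the
   subvariable for field b, or every overlapping subvariable when the
   exact extent is unknown (a sketch of the SCALAR path below).  */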
3194
3195 static void
3196 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3197 bool address_p, bool lhs_p)
3198 {
3199 tree orig_t = t;
3200 HOST_WIDE_INT bitsize = -1;
3201 HOST_WIDE_INT bitmaxsize = -1;
3202 HOST_WIDE_INT bitpos;
3203 tree forzero;
3204
3205 /* Some people like to do cute things like take the address of
3206 &0->a.b */
3207 forzero = t;
3208 while (handled_component_p (forzero)
3209 || INDIRECT_REF_P (forzero)
3210 || TREE_CODE (forzero) == MEM_REF)
3211 forzero = TREE_OPERAND (forzero, 0);
3212
3213 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3214 {
3215 struct constraint_expr temp;
3216
3217 temp.offset = 0;
3218 temp.var = integer_id;
3219 temp.type = SCALAR;
3220 results->safe_push (temp);
3221 return;
3222 }
3223
3224 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3225
3226 /* Pretend to take the address of the base; we'll take care of
3227 adding the required subset of sub-fields below. */
3228 get_constraint_for_1 (t, results, true, lhs_p);
3229 gcc_assert (results->length () == 1);
3230 struct constraint_expr &result = results->last ();
3231
3232 if (result.type == SCALAR
3233 && get_varinfo (result.var)->is_full_var)
3234 /* For single-field vars do not bother about the offset. */
3235 result.offset = 0;
3236 else if (result.type == SCALAR)
3237 {
3238 /* In languages like C, you can access one past the end of an
3239 array. You aren't allowed to dereference it, so we can
3240 ignore this constraint. When we handle pointer subtraction,
3241 we may have to do something cute here. */
3242
3243 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3244 && bitmaxsize != 0)
3245 {
3246 /* It's also not true that the constraint will actually start at the
3247 right offset; it may start in some padding. We only care about
3248 setting the constraint to the first actual field it touches, so
3249 walk to find it. */
3250 struct constraint_expr cexpr = result;
3251 varinfo_t curr;
3252 results->pop ();
3253 cexpr.offset = 0;
3254 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3255 {
3256 if (ranges_overlap_p (curr->offset, curr->size,
3257 bitpos, bitmaxsize))
3258 {
3259 cexpr.var = curr->id;
3260 results->safe_push (cexpr);
3261 if (address_p)
3262 break;
3263 }
3264 }
3265 /* If we are going to take the address of this field then
3266 to be able to compute reachability correctly add at least
3267 the last field of the variable. */
3268 if (address_p && results->length () == 0)
3269 {
3270 curr = get_varinfo (cexpr.var);
3271 while (curr->next != 0)
3272 curr = vi_next (curr);
3273 cexpr.var = curr->id;
3274 results->safe_push (cexpr);
3275 }
3276 else if (results->length () == 0)
3277 /* Assert that we found *some* field there. The user couldn't be
3278 accessing *only* padding. */
3279 /* Still the user could access one past the end of an array
3280 embedded in a struct resulting in accessing *only* padding. */
3281 /* Or accessing only padding via type-punning to a type
3282 that has a field just in padding space. */
3283 {
3284 cexpr.type = SCALAR;
3285 cexpr.var = anything_id;
3286 cexpr.offset = 0;
3287 results->safe_push (cexpr);
3288 }
3289 }
3290 else if (bitmaxsize == 0)
3291 {
3292 if (dump_file && (dump_flags & TDF_DETAILS))
3293 fprintf (dump_file, "Access to zero-sized part of variable, "
3294 "ignoring\n");
3295 }
3296 else
3297 if (dump_file && (dump_flags & TDF_DETAILS))
3298 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3299 }
3300 else if (result.type == DEREF)
3301 {
3302 /* If we do not know exactly where the access goes say so. Note
3303 that we only know for non-structure accesses that we access
3304 at most one subfield of any variable. */
3305 if (bitpos == -1
3306 || bitsize != bitmaxsize
3307 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3308 || result.offset == UNKNOWN_OFFSET)
3309 result.offset = UNKNOWN_OFFSET;
3310 else
3311 result.offset += bitpos;
3312 }
3313 else if (result.type == ADDRESSOF)
3314 {
3315 /* We can end up here for component references on a
3316 VIEW_CONVERT_EXPR <>(&foobar). */
3317 result.type = SCALAR;
3318 result.var = anything_id;
3319 result.offset = 0;
3320 }
3321 else
3322 gcc_unreachable ();
3323 }
3324
3325
3326 /* Dereference the constraint expression CONS, and return the result.
3327 DEREF (ADDRESSOF) = SCALAR
3328 DEREF (SCALAR) = DEREF
3329 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3330 This is needed so that we can handle dereferencing DEREF constraints. */
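/* Concretely, for **p the first dereference yields DEREF (p); the
   second cannot be expressed directly, so a temporary is introduced,
   giving dereftmp = *p with DEREF (dereftmp) as the result.  */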
3331
3332 static void
3333 do_deref (vec<ce_s> *constraints)
3334 {
3335 struct constraint_expr *c;
3336 unsigned int i = 0;
3337
3338 FOR_EACH_VEC_ELT (*constraints, i, c)
3339 {
3340 if (c->type == SCALAR)
3341 c->type = DEREF;
3342 else if (c->type == ADDRESSOF)
3343 c->type = SCALAR;
3344 else if (c->type == DEREF)
3345 {
3346 struct constraint_expr tmplhs;
3347 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3348 process_constraint (new_constraint (tmplhs, *c));
3349 c->var = tmplhs.var;
3350 }
3351 else
3352 gcc_unreachable ();
3353 }
3354 }
3355
3356 /* Given a tree T, return the constraint expression for taking the
3357 address of it. */
3358
3359 static void
3360 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3361 {
3362 struct constraint_expr *c;
3363 unsigned int i;
3364
3365 get_constraint_for_1 (t, results, true, true);
3366
3367 FOR_EACH_VEC_ELT (*results, i, c)
3368 {
3369 if (c->type == DEREF)
3370 c->type = SCALAR;
3371 else
3372 c->type = ADDRESSOF;
3373 }
3374 }
3375
3376 /* Given a tree T, return the constraint expression for it. */
3377
3378 static void
3379 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3380 bool lhs_p)
3381 {
3382 struct constraint_expr temp;
3383
3384 /* x = integer is all glommed to a single variable, which doesn't
3385 point to anything by itself. That is, of course, unless it is an
3386 integer constant being treated as a pointer, in which case, we
3387 will return that this is really the addressof anything. This
3388 happens below, since it will fall into the default case. The only
3389 case we know something about an integer treated like a pointer is
3390 when it is the NULL pointer, and then we just say it points to
3391 NULL.
3392
3393 Do not do that if -fno-delete-null-pointer-checks though, because
3394 in that case *NULL does not fail, so it _should_ alias *anything.
3395 It is not worth adding a new option or renaming the existing one,
3396 since this case is relatively obscure. */
3397 if ((TREE_CODE (t) == INTEGER_CST
3398 && integer_zerop (t))
3399 /* The only valid CONSTRUCTORs in gimple with pointer typed
3400 elements are zero-initializers. But in IPA mode we also
3401 process global initializers, so verify at least. */
3402 || (TREE_CODE (t) == CONSTRUCTOR
3403 && CONSTRUCTOR_NELTS (t) == 0))
3404 {
3405 if (flag_delete_null_pointer_checks)
3406 temp.var = nothing_id;
3407 else
3408 temp.var = nonlocal_id;
3409 temp.type = ADDRESSOF;
3410 temp.offset = 0;
3411 results->safe_push (temp);
3412 return;
3413 }
3414
3415 /* String constants are read-only, ideally we'd have a CONST_DECL
3416 for those. */
3417 if (TREE_CODE (t) == STRING_CST)
3418 {
3419 temp.var = string_id;
3420 temp.type = SCALAR;
3421 temp.offset = 0;
3422 results->safe_push (temp);
3423 return;
3424 }
3425
3426 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3427 {
3428 case tcc_expression:
3429 {
3430 switch (TREE_CODE (t))
3431 {
3432 case ADDR_EXPR:
3433 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3434 return;
3435 default:;
3436 }
3437 break;
3438 }
3439 case tcc_reference:
3440 {
3441 switch (TREE_CODE (t))
3442 {
3443 case MEM_REF:
3444 {
3445 struct constraint_expr cs;
3446 varinfo_t vi, curr;
3447 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3448 TREE_OPERAND (t, 1), results);
3449 do_deref (results);
3450
3451 /* If we are not taking the address then make sure to process
3452 all subvariables we might access. */
3453 if (address_p)
3454 return;
3455
3456 cs = results->last ();
3457 if (cs.type == DEREF
3458 && type_can_have_subvars (TREE_TYPE (t)))
3459 {
3460 /* For dereferences this means we have to defer it
3461 to solving time. */
3462 results->last ().offset = UNKNOWN_OFFSET;
3463 return;
3464 }
3465 if (cs.type != SCALAR)
3466 return;
3467
3468 vi = get_varinfo (cs.var);
3469 curr = vi_next (vi);
3470 if (!vi->is_full_var
3471 && curr)
3472 {
3473 unsigned HOST_WIDE_INT size;
3474 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3475 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3476 else
3477 size = -1;
3478 for (; curr; curr = vi_next (curr))
3479 {
3480 if (curr->offset - vi->offset < size)
3481 {
3482 cs.var = curr->id;
3483 results->safe_push (cs);
3484 }
3485 else
3486 break;
3487 }
3488 }
3489 return;
3490 }
3491 case ARRAY_REF:
3492 case ARRAY_RANGE_REF:
3493 case COMPONENT_REF:
3494 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3495 return;
3496 case VIEW_CONVERT_EXPR:
3497 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3498 lhs_p);
3499 return;
3500 /* We are missing handling for TARGET_MEM_REF here. */
3501 default:;
3502 }
3503 break;
3504 }
3505 case tcc_exceptional:
3506 {
3507 switch (TREE_CODE (t))
3508 {
3509 case SSA_NAME:
3510 {
3511 get_constraint_for_ssa_var (t, results, address_p);
3512 return;
3513 }
3514 case CONSTRUCTOR:
3515 {
3516 unsigned int i;
3517 tree val;
3518 auto_vec<ce_s> tmp;
3519 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3520 {
3521 struct constraint_expr *rhsp;
3522 unsigned j;
3523 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3524 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3525 results->safe_push (*rhsp);
3526 tmp.truncate (0);
3527 }
3528 /* We do not know whether the constructor was complete,
3529 so technically we have to add &NOTHING or &ANYTHING
3530 like we do for an empty constructor as well. */
3531 return;
3532 }
3533 default:;
3534 }
3535 break;
3536 }
3537 case tcc_declaration:
3538 {
3539 get_constraint_for_ssa_var (t, results, address_p);
3540 return;
3541 }
3542 case tcc_constant:
3543 {
3544 /* We cannot refer to automatic variables through constants. */
3545 temp.type = ADDRESSOF;
3546 temp.var = nonlocal_id;
3547 temp.offset = 0;
3548 results->safe_push (temp);
3549 return;
3550 }
3551 default:;
3552 }
3553
3554 /* The default fallback is a constraint from anything. */
3555 temp.type = ADDRESSOF;
3556 temp.var = anything_id;
3557 temp.offset = 0;
3558 results->safe_push (temp);
3559 }
3560
3561 /* Given a gimple tree T, return the constraint expression vector for it. */
3562
3563 static void
3564 get_constraint_for (tree t, vec<ce_s> *results)
3565 {
3566 gcc_assert (results->length () == 0);
3567
3568 get_constraint_for_1 (t, results, false, true);
3569 }
3570
3571 /* Given a gimple tree T, return the constraint expression vector for it
3572 to be used as the rhs of a constraint. */
3573
3574 static void
3575 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3576 {
3577 gcc_assert (results->length () == 0);
3578
3579 get_constraint_for_1 (t, results, false, false);
3580 }
3581
3582
3583 /* Efficiently generates constraints from all entries in *RHSC to all
3584 entries in *LHSC. */
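/* With L entries on the lhs and R on the rhs, the naive cross
   product emits L * R constraints; routing everything through the
   single "allalltmp" temporary below needs only L + R, so the cross
   product is used only when one side has at most one entry.  */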
3585
3586 static void
3587 process_all_all_constraints (vec<ce_s> lhsc,
3588 vec<ce_s> rhsc)
3589 {
3590 struct constraint_expr *lhsp, *rhsp;
3591 unsigned i, j;
3592
3593 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3594 {
3595 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3596 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3597 process_constraint (new_constraint (*lhsp, *rhsp));
3598 }
3599 else
3600 {
3601 struct constraint_expr tmp;
3602 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3603 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3604 process_constraint (new_constraint (tmp, *rhsp));
3605 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3606 process_constraint (new_constraint (*lhsp, tmp));
3607 }
3608 }
3609
3610 /* Handle aggregate copies by expanding into copies of the respective
3611 fields of the structures. */
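/* For example, for a copy a = b where both sides have type
   struct S { int *p; int *q; }, the SCALAR/SCALAR path below pairs
   up overlapping fields and emits a.p = b.p and a.q = b.q, while
   DEREF or unknown-offset cases fall back to
   process_all_all_constraints (a sketch; see the overlap test in the
   loop).  */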
3612
3613 static void
3614 do_structure_copy (tree lhsop, tree rhsop)
3615 {
3616 struct constraint_expr *lhsp, *rhsp;
3617 auto_vec<ce_s> lhsc;
3618 auto_vec<ce_s> rhsc;
3619 unsigned j;
3620
3621 get_constraint_for (lhsop, &lhsc);
3622 get_constraint_for_rhs (rhsop, &rhsc);
3623 lhsp = &lhsc[0];
3624 rhsp = &rhsc[0];
3625 if (lhsp->type == DEREF
3626 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3627 || rhsp->type == DEREF)
3628 {
3629 if (lhsp->type == DEREF)
3630 {
3631 gcc_assert (lhsc.length () == 1);
3632 lhsp->offset = UNKNOWN_OFFSET;
3633 }
3634 if (rhsp->type == DEREF)
3635 {
3636 gcc_assert (rhsc.length () == 1);
3637 rhsp->offset = UNKNOWN_OFFSET;
3638 }
3639 process_all_all_constraints (lhsc, rhsc);
3640 }
3641 else if (lhsp->type == SCALAR
3642 && (rhsp->type == SCALAR
3643 || rhsp->type == ADDRESSOF))
3644 {
3645 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3646 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3647 unsigned k = 0;
3648 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3649 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3650 for (j = 0; lhsc.iterate (j, &lhsp);)
3651 {
3652 varinfo_t lhsv, rhsv;
3653 rhsp = &rhsc[k];
3654 lhsv = get_varinfo (lhsp->var);
3655 rhsv = get_varinfo (rhsp->var);
3656 if (lhsv->may_have_pointers
3657 && (lhsv->is_full_var
3658 || rhsv->is_full_var
3659 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3660 rhsv->offset + lhsoffset, rhsv->size)))
3661 process_constraint (new_constraint (*lhsp, *rhsp));
3662 if (!rhsv->is_full_var
3663 && (lhsv->is_full_var
3664 || (lhsv->offset + rhsoffset + lhsv->size
3665 > rhsv->offset + lhsoffset + rhsv->size)))
3666 {
3667 ++k;
3668 if (k >= rhsc.length ())
3669 break;
3670 }
3671 else
3672 ++j;
3673 }
3674 }
3675 else
3676 gcc_unreachable ();
3677 }
3678
3679 /* Create constraints ID = { rhsc }. */
3680
3681 static void
3682 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3683 {
3684 struct constraint_expr *c;
3685 struct constraint_expr includes;
3686 unsigned int j;
3687
3688 includes.var = id;
3689 includes.offset = 0;
3690 includes.type = SCALAR;
3691
3692 FOR_EACH_VEC_ELT (rhsc, j, c)
3693 process_constraint (new_constraint (includes, *c));
3694 }
3695
3696 /* Create a constraint ID = OP. */
3697
3698 static void
3699 make_constraint_to (unsigned id, tree op)
3700 {
3701 auto_vec<ce_s> rhsc;
3702 get_constraint_for_rhs (op, &rhsc);
3703 make_constraints_to (id, rhsc);
3704 }
3705
3706 /* Create a constraint ID = &FROM. */
3707
3708 static void
3709 make_constraint_from (varinfo_t vi, int from)
3710 {
3711 struct constraint_expr lhs, rhs;
3712
3713 lhs.var = vi->id;
3714 lhs.offset = 0;
3715 lhs.type = SCALAR;
3716
3717 rhs.var = from;
3718 rhs.offset = 0;
3719 rhs.type = ADDRESSOF;
3720 process_constraint (new_constraint (lhs, rhs));
3721 }
3722
3723 /* Create a constraint ID = FROM. */
3724
3725 static void
3726 make_copy_constraint (varinfo_t vi, int from)
3727 {
3728 struct constraint_expr lhs, rhs;
3729
3730 lhs.var = vi->id;
3731 lhs.offset = 0;
3732 lhs.type = SCALAR;
3733
3734 rhs.var = from;
3735 rhs.offset = 0;
3736 rhs.type = SCALAR;
3737 process_constraint (new_constraint (lhs, rhs));
3738 }
3739
3740 /* Make constraints necessary to make OP escape. */
3741
3742 static void
3743 make_escape_constraint (tree op)
3744 {
3745 make_constraint_to (escaped_id, op);
3746 }
3747
3748 /* Add constraints so that the solution of VI is transitively closed. */
3749
3750 static void
3751 make_transitive_closure_constraints (varinfo_t vi)
3752 {
3753 struct constraint_expr lhs, rhs;
3754
3755 /* VAR = *VAR; */
3756 lhs.type = SCALAR;
3757 lhs.var = vi->id;
3758 lhs.offset = 0;
3759 rhs.type = DEREF;
3760 rhs.var = vi->id;
3761 rhs.offset = UNKNOWN_OFFSET;
3762 process_constraint (new_constraint (lhs, rhs));
3763 }
3764
3765 /* Temporary storage for fake var decls. */
3766 struct obstack fake_var_decl_obstack;
3767
3768 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3769
3770 static tree
3771 build_fake_var_decl (tree type)
3772 {
3773 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3774 memset (decl, 0, sizeof (struct tree_var_decl));
3775 TREE_SET_CODE (decl, VAR_DECL);
3776 TREE_TYPE (decl) = type;
3777 DECL_UID (decl) = allocate_decl_uid ();
3778 SET_DECL_PT_UID (decl, -1);
3779 layout_decl (decl, 0);
3780 return decl;
3781 }
3782
3783 /* Create a new artificial heap variable with NAME.
3784 Return the created variable. */
3785
3786 static varinfo_t
3787 make_heapvar (const char *name)
3788 {
3789 varinfo_t vi;
3790 tree heapvar;
3791
3792 heapvar = build_fake_var_decl (ptr_type_node);
3793 DECL_EXTERNAL (heapvar) = 1;
3794
3795 vi = new_var_info (heapvar, name);
3796 vi->is_artificial_var = true;
3797 vi->is_heap_var = true;
3798 vi->is_unknown_size_var = true;
3799 vi->offset = 0;
3800 vi->fullsize = ~0;
3801 vi->size = ~0;
3802 vi->is_full_var = true;
3803 insert_vi_for_tree (heapvar, vi);
3804
3805 return vi;
3806 }
3807
3808 /* Create a new artificial heap variable with NAME and make a
3809 constraint from it to LHS. Set flags according to a tag used
3810 for tracking restrict pointers. */
3811
3812 static varinfo_t
3813 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3814 {
3815 varinfo_t vi = make_heapvar (name);
3816 vi->is_restrict_var = 1;
3817 vi->is_global_var = 1;
3818 vi->may_have_pointers = 1;
3819 make_constraint_from (lhs, vi->id);
3820 return vi;
3821 }
3822
3823 /* Create a new artificial heap variable with NAME and make a
3824 constraint from it to LHS. Set flags according to a tag used
3825 for tracking restrict pointers and make the artificial heap
3826 point to global memory. */
3827
3828 static varinfo_t
3829 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3830 {
3831 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3832 make_copy_constraint (vi, nonlocal_id);
3833 return vi;
3834 }
3835
3836 /* In IPA mode there are varinfos for different aspects of each
3837 function designator. One for the points-to set of the return
3838 value, one for the variables that are clobbered by the function,
3839 one for its uses and one for each parameter (including a single
3840 glob for remaining variadic arguments). */
3841
3842 enum { fi_clobbers = 1, fi_uses = 2,
3843 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3844
3845 /* Get a constraint for the requested part of a function designator FI
3846 when operating in IPA mode. */
3847
3848 static struct constraint_expr
3849 get_function_part_constraint (varinfo_t fi, unsigned part)
3850 {
3851 struct constraint_expr c;
3852
3853 gcc_assert (in_ipa_mode);
3854
3855 if (fi->id == anything_id)
3856 {
3857 /* ??? We probably should have an ANYFN special variable. */
3858 c.var = anything_id;
3859 c.offset = 0;
3860 c.type = SCALAR;
3861 }
3862 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3863 {
3864 varinfo_t ai = first_vi_for_offset (fi, part);
3865 if (ai)
3866 c.var = ai->id;
3867 else
3868 c.var = anything_id;
3869 c.offset = 0;
3870 c.type = SCALAR;
3871 }
3872 else
3873 {
3874 c.var = fi->id;
3875 c.offset = part;
3876 c.type = DEREF;
3877 }
3878
3879 return c;
3880 }
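
/* Sketch: for a function foo with info FI, part fi_parm_base + 1
   denotes foo's second parameter.  With a known FUNCTION_DECL this
   resolves to the varinfo at that offset (e.g. "foo.arg1"); for an
   indirect callee it stays a DEREF of FI at offset fi_parm_base + 1,
   to be resolved during solving.  */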
3881
3882 /* For non-IPA mode, generate constraints necessary for a call on the
3883 RHS. */
3884
3885 static void
3886 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3887 {
3888 struct constraint_expr rhsc;
3889 unsigned i;
3890 bool returns_uses = false;
3891
3892 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3893 {
3894 tree arg = gimple_call_arg (stmt, i);
3895 int flags = gimple_call_arg_flags (stmt, i);
3896
3897 /* If the argument is not used we can ignore it. */
3898 if (flags & EAF_UNUSED)
3899 continue;
3900
3901 /* As we compute ESCAPED context-insensitively we do not gain
3902 any precision from EAF_NOCLOBBER alone without EAF_NOESCAPE
3903 also set. The argument would still get clobbered through the
3904 escape solution. */
3905 if ((flags & EAF_NOCLOBBER)
3906 && (flags & EAF_NOESCAPE))
3907 {
3908 varinfo_t uses = get_call_use_vi (stmt);
3909 if (!(flags & EAF_DIRECT))
3910 {
3911 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3912 make_constraint_to (tem->id, arg);
3913 make_transitive_closure_constraints (tem);
3914 make_copy_constraint (uses, tem->id);
3915 }
3916 else
3917 make_constraint_to (uses->id, arg);
3918 returns_uses = true;
3919 }
3920 else if (flags & EAF_NOESCAPE)
3921 {
3922 struct constraint_expr lhs, rhs;
3923 varinfo_t uses = get_call_use_vi (stmt);
3924 varinfo_t clobbers = get_call_clobber_vi (stmt);
3925 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3926 make_constraint_to (tem->id, arg);
3927 if (!(flags & EAF_DIRECT))
3928 make_transitive_closure_constraints (tem);
3929 make_copy_constraint (uses, tem->id);
3930 make_copy_constraint (clobbers, tem->id);
3931 /* Add *tem = nonlocal, do not add *tem = callused as
3932 EAF_NOESCAPE parameters do not escape to other parameters
3933 and all other uses appear in NONLOCAL as well. */
3934 lhs.type = DEREF;
3935 lhs.var = tem->id;
3936 lhs.offset = 0;
3937 rhs.type = SCALAR;
3938 rhs.var = nonlocal_id;
3939 rhs.offset = 0;
3940 process_constraint (new_constraint (lhs, rhs));
3941 returns_uses = true;
3942 }
3943 else
3944 make_escape_constraint (arg);
3945 }
3946
3947 /* If we added to the call's uses solution, make sure we account for
3948 pointers to it being returned. */
3949 if (returns_uses)
3950 {
3951 rhsc.var = get_call_use_vi (stmt)->id;
3952 rhsc.offset = 0;
3953 rhsc.type = SCALAR;
3954 results->safe_push (rhsc);
3955 }
3956
3957 /* The static chain escapes as well. */
3958 if (gimple_call_chain (stmt))
3959 make_escape_constraint (gimple_call_chain (stmt));
3960
3961 /* And if we applied NRV the address of the return slot escapes as well. */
3962 if (gimple_call_return_slot_opt_p (stmt)
3963 && gimple_call_lhs (stmt) != NULL_TREE
3964 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3965 {
3966 auto_vec<ce_s> tmpc;
3967 struct constraint_expr lhsc, *c;
3968 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3969 lhsc.var = escaped_id;
3970 lhsc.offset = 0;
3971 lhsc.type = SCALAR;
3972 FOR_EACH_VEC_ELT (tmpc, i, c)
3973 process_constraint (new_constraint (lhsc, *c));
3974 }
3975
3976 /* Regular functions return nonlocal memory. */
3977 rhsc.var = nonlocal_id;
3978 rhsc.offset = 0;
3979 rhsc.type = SCALAR;
3980 results->safe_push (rhsc);
3981 }
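
/* Hedged example of the non-IPA call handling above: for

     bar (p);

   with no EAF_* flags known for bar's parameter, the argument escapes,
   i.e. ESCAPED = p, and the call result set includes NONLOCAL.  With
   EAF_NOCLOBBER|EAF_NOESCAPE instead, p only feeds the per-call use
   variable and does not enter ESCAPED.  */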
3982
3983 /* For non-IPA mode, generate constraints necessary for a call
3984 that returns a pointer and assigns it to LHS. This simply makes
3985 the LHS point to global and escaped variables. */
3986
3987 static void
3988 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
3989 tree fndecl)
3990 {
3991 auto_vec<ce_s> lhsc;
3992
3993 get_constraint_for (lhs, &lhsc);
3994 /* If the store is to a global decl make sure to
3995 add proper escape constraints. */
3996 lhs = get_base_address (lhs);
3997 if (lhs
3998 && DECL_P (lhs)
3999 && is_global_var (lhs))
4000 {
4001 struct constraint_expr tmpc;
4002 tmpc.var = escaped_id;
4003 tmpc.offset = 0;
4004 tmpc.type = SCALAR;
4005 lhsc.safe_push (tmpc);
4006 }
4007
4008 /* If the call returns an argument unmodified, override the rhs
4009 constraints. */
4010 if (flags & ERF_RETURNS_ARG
4011 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4012 {
4013 tree arg;
4014 rhsc.create (0);
4015 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4016 get_constraint_for (arg, &rhsc);
4017 process_all_all_constraints (lhsc, rhsc);
4018 rhsc.release ();
4019 }
4020 else if (flags & ERF_NOALIAS)
4021 {
4022 varinfo_t vi;
4023 struct constraint_expr tmpc;
4024 rhsc.create (0);
4025 vi = make_heapvar ("HEAP");
4026 /* We mark allocated storage as local; it becoming global is handled
4027 by escaping and by setting vars_contains_escaped_heap. */
4028 DECL_EXTERNAL (vi->decl) = 0;
4029 vi->is_global_var = 0;
4030 /* If this is not a real malloc call, assume the memory was
4031 initialized and thus may point to global memory. All
4032 builtin functions with the malloc attribute behave in a sane way. */
4033 if (!fndecl
4034 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4035 make_constraint_from (vi, nonlocal_id);
4036 tmpc.var = vi->id;
4037 tmpc.offset = 0;
4038 tmpc.type = ADDRESSOF;
4039 rhsc.safe_push (tmpc);
4040 process_all_all_constraints (lhsc, rhsc);
4041 rhsc.release ();
4042 }
4043 else
4044 process_all_all_constraints (lhsc, rhsc);
4045 }
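
/* Hedged example: for a noalias-returning call such as

     p = malloc (n);

   a fresh local heap variable is created and the constraint is
   conceptually p = &HEAP.  For ERF_RETURNS_ARG the rhs is instead
   replaced by the constraints of the returned argument.  */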
4046
4047 /* For non-IPA mode, generate constraints necessary for a call of a
4048 const function that returns a pointer in the statement STMT. */
4049
4050 static void
4051 handle_const_call (gcall *stmt, vec<ce_s> *results)
4052 {
4053 struct constraint_expr rhsc;
4054 unsigned int k;
4055
4056 /* Treat nested const functions the same as pure functions as far
4057 as the static chain is concerned. */
4058 if (gimple_call_chain (stmt))
4059 {
4060 varinfo_t uses = get_call_use_vi (stmt);
4061 make_transitive_closure_constraints (uses);
4062 make_constraint_to (uses->id, gimple_call_chain (stmt));
4063 rhsc.var = uses->id;
4064 rhsc.offset = 0;
4065 rhsc.type = SCALAR;
4066 results->safe_push (rhsc);
4067 }
4068
4069 /* May return arguments. */
4070 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4071 {
4072 tree arg = gimple_call_arg (stmt, k);
4073 auto_vec<ce_s> argc;
4074 unsigned i;
4075 struct constraint_expr *argp;
4076 get_constraint_for_rhs (arg, &argc);
4077 FOR_EACH_VEC_ELT (argc, i, argp)
4078 results->safe_push (*argp);
4079 }
4080
4081 /* May return addresses of globals. */
4082 rhsc.var = nonlocal_id;
4083 rhsc.offset = 0;
4084 rhsc.type = ADDRESSOF;
4085 results->safe_push (rhsc);
4086 }
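
/* Sketch: for a const function returning a pointer,

     q = cfn (p);

   the result may be any argument or a global address, so RESULTS
   receives the constraints of p plus &NONLOCAL; effectively
   q = p and q = &NONLOCAL.  */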
4087
4088 /* For non-IPA mode, generate constraints necessary for a call to a
4089 pure function in statement STMT. */
4090
4091 static void
4092 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4093 {
4094 struct constraint_expr rhsc;
4095 unsigned i;
4096 varinfo_t uses = NULL;
4097
4098 /* Memory reached from pointer arguments is call-used. */
4099 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4100 {
4101 tree arg = gimple_call_arg (stmt, i);
4102 if (!uses)
4103 {
4104 uses = get_call_use_vi (stmt);
4105 make_transitive_closure_constraints (uses);
4106 }
4107 make_constraint_to (uses->id, arg);
4108 }
4109
4110 /* The static chain is used as well. */
4111 if (gimple_call_chain (stmt))
4112 {
4113 if (!uses)
4114 {
4115 uses = get_call_use_vi (stmt);
4116 make_transitive_closure_constraints (uses);
4117 }
4118 make_constraint_to (uses->id, gimple_call_chain (stmt));
4119 }
4120
4121 /* Pure functions may return call-used and nonlocal memory. */
4122 if (uses)
4123 {
4124 rhsc.var = uses->id;
4125 rhsc.offset = 0;
4126 rhsc.type = SCALAR;
4127 results->safe_push (rhsc);
4128 }
4129 rhsc.var = nonlocal_id;
4130 rhsc.offset = 0;
4131 rhsc.type = SCALAR;
4132 results->safe_push (rhsc);
4133 }
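
/* Sketch: for a pure function,

     q = pfn (p);

   memory reachable from p is collected into the per-call use variable
   (made transitively closed above), and the result may point into it
   or into global memory: q = CALLUSED and q = NONLOCAL.  */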
4134
4135
4136 /* Return the varinfo for the callee of CALL. */
4137
4138 static varinfo_t
4139 get_fi_for_callee (gcall *call)
4140 {
4141 tree decl, fn = gimple_call_fn (call);
4142
4143 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4144 fn = OBJ_TYPE_REF_EXPR (fn);
4145
4146 /* If we can directly resolve the function being called, do so.
4147 Otherwise, it must be some sort of indirect expression that
4148 we should still be able to handle. */
4149 decl = gimple_call_addr_fndecl (fn);
4150 if (decl)
4151 return get_vi_for_tree (decl);
4152
4153 /* If the function is anything other than an SSA name pointer we have no
4154 clue and should be getting ANYFN (well, ANYTHING for now). */
4155 if (!fn || TREE_CODE (fn) != SSA_NAME)
4156 return get_varinfo (anything_id);
4157
4158 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4159 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4160 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4161 fn = SSA_NAME_VAR (fn);
4162
4163 return get_vi_for_tree (fn);
4164 }
4165
4166 /* Create constraints for the builtin call T. Return true if the call
4167 was handled, otherwise false. */
4168
4169 static bool
4170 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4171 {
4172 tree fndecl = gimple_call_fndecl (t);
4173 auto_vec<ce_s, 2> lhsc;
4174 auto_vec<ce_s, 4> rhsc;
4175 varinfo_t fi;
4176
4177 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4178 /* ??? All builtins that are handled here need to be handled
4179 in the alias-oracle query functions explicitly! */
4180 switch (DECL_FUNCTION_CODE (fndecl))
4181 {
4182 /* All the following functions return a pointer to the same object
4183 as their first argument points to. The functions do not add
4184 to the ESCAPED solution. The functions make the memory pointed
4185 to by the first argument point to what the memory pointed to
4186 by the second argument points to. */
4187 case BUILT_IN_STRCPY:
4188 case BUILT_IN_STRNCPY:
4189 case BUILT_IN_BCOPY:
4190 case BUILT_IN_MEMCPY:
4191 case BUILT_IN_MEMMOVE:
4192 case BUILT_IN_MEMPCPY:
4193 case BUILT_IN_STPCPY:
4194 case BUILT_IN_STPNCPY:
4195 case BUILT_IN_STRCAT:
4196 case BUILT_IN_STRNCAT:
4197 case BUILT_IN_STRCPY_CHK:
4198 case BUILT_IN_STRNCPY_CHK:
4199 case BUILT_IN_MEMCPY_CHK:
4200 case BUILT_IN_MEMMOVE_CHK:
4201 case BUILT_IN_MEMPCPY_CHK:
4202 case BUILT_IN_STPCPY_CHK:
4203 case BUILT_IN_STPNCPY_CHK:
4204 case BUILT_IN_STRCAT_CHK:
4205 case BUILT_IN_STRNCAT_CHK:
4206 case BUILT_IN_TM_MEMCPY:
4207 case BUILT_IN_TM_MEMMOVE:
4208 {
4209 tree res = gimple_call_lhs (t);
4210 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4211 == BUILT_IN_BCOPY ? 1 : 0));
4212 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4213 == BUILT_IN_BCOPY ? 0 : 1));
4214 if (res != NULL_TREE)
4215 {
4216 get_constraint_for (res, &lhsc);
4217 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4218 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4219 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4220 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4221 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4222 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4223 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4224 else
4225 get_constraint_for (dest, &rhsc);
4226 process_all_all_constraints (lhsc, rhsc);
4227 lhsc.truncate (0);
4228 rhsc.truncate (0);
4229 }
4230 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4231 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4232 do_deref (&lhsc);
4233 do_deref (&rhsc);
4234 process_all_all_constraints (lhsc, rhsc);
4235 return true;
4236 }
4237 case BUILT_IN_MEMSET:
4238 case BUILT_IN_MEMSET_CHK:
4239 case BUILT_IN_TM_MEMSET:
4240 {
4241 tree res = gimple_call_lhs (t);
4242 tree dest = gimple_call_arg (t, 0);
4243 unsigned i;
4244 ce_s *lhsp;
4245 struct constraint_expr ac;
4246 if (res != NULL_TREE)
4247 {
4248 get_constraint_for (res, &lhsc);
4249 get_constraint_for (dest, &rhsc);
4250 process_all_all_constraints (lhsc, rhsc);
4251 lhsc.truncate (0);
4252 }
4253 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4254 do_deref (&lhsc);
4255 if (flag_delete_null_pointer_checks
4256 && integer_zerop (gimple_call_arg (t, 1)))
4257 {
4258 ac.type = ADDRESSOF;
4259 ac.var = nothing_id;
4260 }
4261 else
4262 {
4263 ac.type = SCALAR;
4264 ac.var = integer_id;
4265 }
4266 ac.offset = 0;
4267 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4268 process_constraint (new_constraint (*lhsp, ac));
4269 return true;
4270 }
4271 case BUILT_IN_POSIX_MEMALIGN:
4272 {
4273 tree ptrptr = gimple_call_arg (t, 0);
4274 get_constraint_for (ptrptr, &lhsc);
4275 do_deref (&lhsc);
4276 varinfo_t vi = make_heapvar ("HEAP");
4277 /* We mark allocated storage as local; it becoming global is handled
4278 by escaping and by setting vars_contains_escaped_heap. */
4279 DECL_EXTERNAL (vi->decl) = 0;
4280 vi->is_global_var = 0;
4281 struct constraint_expr tmpc;
4282 tmpc.var = vi->id;
4283 tmpc.offset = 0;
4284 tmpc.type = ADDRESSOF;
4285 rhsc.safe_push (tmpc);
4286 process_all_all_constraints (lhsc, rhsc);
4287 return true;
4288 }
4289 case BUILT_IN_ASSUME_ALIGNED:
4290 {
4291 tree res = gimple_call_lhs (t);
4292 tree dest = gimple_call_arg (t, 0);
4293 if (res != NULL_TREE)
4294 {
4295 get_constraint_for (res, &lhsc);
4296 get_constraint_for (dest, &rhsc);
4297 process_all_all_constraints (lhsc, rhsc);
4298 }
4299 return true;
4300 }
4301 /* All the following functions do not return pointers, do not
4302 modify the points-to sets of memory reachable from their
4303 arguments and do not add to the ESCAPED solution. */
4304 case BUILT_IN_SINCOS:
4305 case BUILT_IN_SINCOSF:
4306 case BUILT_IN_SINCOSL:
4307 case BUILT_IN_FREXP:
4308 case BUILT_IN_FREXPF:
4309 case BUILT_IN_FREXPL:
4310 case BUILT_IN_GAMMA_R:
4311 case BUILT_IN_GAMMAF_R:
4312 case BUILT_IN_GAMMAL_R:
4313 case BUILT_IN_LGAMMA_R:
4314 case BUILT_IN_LGAMMAF_R:
4315 case BUILT_IN_LGAMMAL_R:
4316 case BUILT_IN_MODF:
4317 case BUILT_IN_MODFF:
4318 case BUILT_IN_MODFL:
4319 case BUILT_IN_REMQUO:
4320 case BUILT_IN_REMQUOF:
4321 case BUILT_IN_REMQUOL:
4322 case BUILT_IN_FREE:
4323 return true;
4324 case BUILT_IN_STRDUP:
4325 case BUILT_IN_STRNDUP:
4326 case BUILT_IN_REALLOC:
4327 if (gimple_call_lhs (t))
4328 {
4329 handle_lhs_call (t, gimple_call_lhs (t),
4330 gimple_call_return_flags (t) | ERF_NOALIAS,
4331 vNULL, fndecl);
4332 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4333 NULL_TREE, &lhsc);
4334 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4335 NULL_TREE, &rhsc);
4336 do_deref (&lhsc);
4337 do_deref (&rhsc);
4338 process_all_all_constraints (lhsc, rhsc);
4339 lhsc.truncate (0);
4340 rhsc.truncate (0);
4341 /* For realloc the resulting pointer can be equal to the
4342 argument as well. But only doing this wouldn't be
4343 correct because with ptr == 0 realloc behaves like malloc. */
4344 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4345 {
4346 get_constraint_for (gimple_call_lhs (t), &lhsc);
4347 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4348 process_all_all_constraints (lhsc, rhsc);
4349 }
4350 return true;
4351 }
4352 break;
4353 /* String / character search functions return a pointer into the
4354 source string or NULL. */
4355 case BUILT_IN_INDEX:
4356 case BUILT_IN_STRCHR:
4357 case BUILT_IN_STRRCHR:
4358 case BUILT_IN_MEMCHR:
4359 case BUILT_IN_STRSTR:
4360 case BUILT_IN_STRPBRK:
4361 if (gimple_call_lhs (t))
4362 {
4363 tree src = gimple_call_arg (t, 0);
4364 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4365 constraint_expr nul;
4366 nul.var = nothing_id;
4367 nul.offset = 0;
4368 nul.type = ADDRESSOF;
4369 rhsc.safe_push (nul);
4370 get_constraint_for (gimple_call_lhs (t), &lhsc);
4371 process_all_all_constraints (lhsc, rhsc);
4372 }
4373 return true;
4374 /* Trampolines are special - they set up passing the static
4375 frame. */
4376 case BUILT_IN_INIT_TRAMPOLINE:
4377 {
4378 tree tramp = gimple_call_arg (t, 0);
4379 tree nfunc = gimple_call_arg (t, 1);
4380 tree frame = gimple_call_arg (t, 2);
4381 unsigned i;
4382 struct constraint_expr lhs, *rhsp;
4383 if (in_ipa_mode)
4384 {
4385 varinfo_t nfi = NULL;
4386 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4387 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4388 if (nfi)
4389 {
4390 lhs = get_function_part_constraint (nfi, fi_static_chain);
4391 get_constraint_for (frame, &rhsc);
4392 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4393 process_constraint (new_constraint (lhs, *rhsp));
4394 rhsc.truncate (0);
4395
4396 /* Make the frame point to the function for
4397 the trampoline adjustment call. */
4398 get_constraint_for (tramp, &lhsc);
4399 do_deref (&lhsc);
4400 get_constraint_for (nfunc, &rhsc);
4401 process_all_all_constraints (lhsc, rhsc);
4402
4403 return true;
4404 }
4405 }
4406 /* Else fallthru to generic handling which will let
4407 the frame escape. */
4408 break;
4409 }
4410 case BUILT_IN_ADJUST_TRAMPOLINE:
4411 {
4412 tree tramp = gimple_call_arg (t, 0);
4413 tree res = gimple_call_lhs (t);
4414 if (in_ipa_mode && res)
4415 {
4416 get_constraint_for (res, &lhsc);
4417 get_constraint_for (tramp, &rhsc);
4418 do_deref (&rhsc);
4419 process_all_all_constraints (lhsc, rhsc);
4420 }
4421 return true;
4422 }
4423 CASE_BUILT_IN_TM_STORE (1):
4424 CASE_BUILT_IN_TM_STORE (2):
4425 CASE_BUILT_IN_TM_STORE (4):
4426 CASE_BUILT_IN_TM_STORE (8):
4427 CASE_BUILT_IN_TM_STORE (FLOAT):
4428 CASE_BUILT_IN_TM_STORE (DOUBLE):
4429 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4430 CASE_BUILT_IN_TM_STORE (M64):
4431 CASE_BUILT_IN_TM_STORE (M128):
4432 CASE_BUILT_IN_TM_STORE (M256):
4433 {
4434 tree addr = gimple_call_arg (t, 0);
4435 tree src = gimple_call_arg (t, 1);
4436
4437 get_constraint_for (addr, &lhsc);
4438 do_deref (&lhsc);
4439 get_constraint_for (src, &rhsc);
4440 process_all_all_constraints (lhsc, rhsc);
4441 return true;
4442 }
4443 CASE_BUILT_IN_TM_LOAD (1):
4444 CASE_BUILT_IN_TM_LOAD (2):
4445 CASE_BUILT_IN_TM_LOAD (4):
4446 CASE_BUILT_IN_TM_LOAD (8):
4447 CASE_BUILT_IN_TM_LOAD (FLOAT):
4448 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4449 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4450 CASE_BUILT_IN_TM_LOAD (M64):
4451 CASE_BUILT_IN_TM_LOAD (M128):
4452 CASE_BUILT_IN_TM_LOAD (M256):
4453 {
4454 tree dest = gimple_call_lhs (t);
4455 tree addr = gimple_call_arg (t, 0);
4456
4457 get_constraint_for (dest, &lhsc);
4458 get_constraint_for (addr, &rhsc);
4459 do_deref (&rhsc);
4460 process_all_all_constraints (lhsc, rhsc);
4461 return true;
4462 }
4463 /* Variadic arguments need to be handled in IPA
4464 mode as well. */
4465 case BUILT_IN_VA_START:
4466 {
4467 tree valist = gimple_call_arg (t, 0);
4468 struct constraint_expr rhs, *lhsp;
4469 unsigned i;
4470 get_constraint_for (valist, &lhsc);
4471 do_deref (&lhsc);
4472 /* The va_list gets access to pointers in variadic
4473 arguments, which we know in the case of IPA analysis
4474 and which otherwise are just all nonlocal variables. */
4475 if (in_ipa_mode)
4476 {
4477 fi = lookup_vi_for_tree (fn->decl);
4478 rhs = get_function_part_constraint (fi, ~0);
4479 rhs.type = ADDRESSOF;
4480 }
4481 else
4482 {
4483 rhs.var = nonlocal_id;
4484 rhs.type = ADDRESSOF;
4485 rhs.offset = 0;
4486 }
4487 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4488 process_constraint (new_constraint (*lhsp, rhs));
4489 /* va_list is clobbered. */
4490 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4491 return true;
4492 }
4493 /* va_end doesn't have any effect that matters. */
4494 case BUILT_IN_VA_END:
4495 return true;
4496 /* Alternate return. Simply give up for now. */
4497 case BUILT_IN_RETURN:
4498 {
4499 fi = NULL;
4500 if (!in_ipa_mode
4501 || !(fi = get_vi_for_tree (fn->decl)))
4502 make_constraint_from (get_varinfo (escaped_id), anything_id);
4503 else if (in_ipa_mode
4504 && fi != NULL)
4505 {
4506 struct constraint_expr lhs, rhs;
4507 lhs = get_function_part_constraint (fi, fi_result);
4508 rhs.var = anything_id;
4509 rhs.offset = 0;
4510 rhs.type = SCALAR;
4511 process_constraint (new_constraint (lhs, rhs));
4512 }
4513 return true;
4514 }
4515 /* printf-style functions may have hooks to set pointers to
4516 point to somewhere into the generated string. Leave them
4517 for a later exercise... */
4518 default:
4519 /* Fallthru to general call handling. */;
4520 }
4521
4522 return false;
4523 }
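
/* Hedged example of the builtin handling above: for

     r = __builtin_memcpy (d, s, n);

   the generated constraints are conceptually r = d for the returned
   pointer and *d = *s (at unknown offsets) for the copied contents;
   neither pointed-to memory is forced into ESCAPED.  */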
4524
4525 /* Create constraints for the call T. */
4526
4527 static void
4528 find_func_aliases_for_call (struct function *fn, gcall *t)
4529 {
4530 tree fndecl = gimple_call_fndecl (t);
4531 varinfo_t fi;
4532
4533 if (fndecl != NULL_TREE
4534 && DECL_BUILT_IN (fndecl)
4535 && find_func_aliases_for_builtin_call (fn, t))
4536 return;
4537
4538 fi = get_fi_for_callee (t);
4539 if (!in_ipa_mode
4540 || (fndecl && !fi->is_fn_info))
4541 {
4542 auto_vec<ce_s, 16> rhsc;
4543 int flags = gimple_call_flags (t);
4544
4545 /* Const functions can return their arguments and addresses
4546 of global memory but not of escaped memory. */
4547 if (flags & (ECF_CONST|ECF_NOVOPS))
4548 {
4549 if (gimple_call_lhs (t))
4550 handle_const_call (t, &rhsc);
4551 }
4552 /* Pure functions can return addresses in and of memory
4553 reachable from their arguments, but they are not an escape
4554 point for reachable memory of their arguments. */
4555 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4556 handle_pure_call (t, &rhsc);
4557 else
4558 handle_rhs_call (t, &rhsc);
4559 if (gimple_call_lhs (t))
4560 handle_lhs_call (t, gimple_call_lhs (t),
4561 gimple_call_return_flags (t), rhsc, fndecl);
4562 }
4563 else
4564 {
4565 auto_vec<ce_s, 2> rhsc;
4566 tree lhsop;
4567 unsigned j;
4568
4569 /* Assign all the passed arguments to the appropriate incoming
4570 parameters of the function. */
4571 for (j = 0; j < gimple_call_num_args (t); j++)
4572 {
4573 struct constraint_expr lhs;
4574 struct constraint_expr *rhsp;
4575 tree arg = gimple_call_arg (t, j);
4576
4577 get_constraint_for_rhs (arg, &rhsc);
4578 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4579 while (rhsc.length () != 0)
4580 {
4581 rhsp = &rhsc.last ();
4582 process_constraint (new_constraint (lhs, *rhsp));
4583 rhsc.pop ();
4584 }
4585 }
4586
4587 /* If we are returning a value, assign it to the result. */
4588 lhsop = gimple_call_lhs (t);
4589 if (lhsop)
4590 {
4591 auto_vec<ce_s, 2> lhsc;
4592 struct constraint_expr rhs;
4593 struct constraint_expr *lhsp;
4594
4595 get_constraint_for (lhsop, &lhsc);
4596 rhs = get_function_part_constraint (fi, fi_result);
4597 if (fndecl
4598 && DECL_RESULT (fndecl)
4599 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4600 {
4601 auto_vec<ce_s, 2> tem;
4602 tem.quick_push (rhs);
4603 do_deref (&tem);
4604 gcc_checking_assert (tem.length () == 1);
4605 rhs = tem[0];
4606 }
4607 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4608 process_constraint (new_constraint (*lhsp, rhs));
4609 }
4610
4611 /* If we pass the result decl by reference, honor that. */
4612 if (lhsop
4613 && fndecl
4614 && DECL_RESULT (fndecl)
4615 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4616 {
4617 struct constraint_expr lhs;
4618 struct constraint_expr *rhsp;
4619
4620 get_constraint_for_address_of (lhsop, &rhsc);
4621 lhs = get_function_part_constraint (fi, fi_result);
4622 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4623 process_constraint (new_constraint (lhs, *rhsp));
4624 rhsc.truncate (0);
4625 }
4626
4627 /* If we use a static chain, pass it along. */
4628 if (gimple_call_chain (t))
4629 {
4630 struct constraint_expr lhs;
4631 struct constraint_expr *rhsp;
4632
4633 get_constraint_for (gimple_call_chain (t), &rhsc);
4634 lhs = get_function_part_constraint (fi, fi_static_chain);
4635 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4636 process_constraint (new_constraint (lhs, *rhsp));
4637 }
4638 }
4639 }
4640
4641 /* Walk statement T setting up aliasing constraints according to the
4642 references found in T. This function is the main part of the
4643 constraint builder. FN is the function T belongs to. */
4645
4646 static void
4647 find_func_aliases (struct function *fn, gimple origt)
4648 {
4649 gimple t = origt;
4650 auto_vec<ce_s, 16> lhsc;
4651 auto_vec<ce_s, 16> rhsc;
4652 struct constraint_expr *c;
4653 varinfo_t fi;
4654
4655 /* Now build constraints expressions. */
4656 if (gimple_code (t) == GIMPLE_PHI)
4657 {
4658 size_t i;
4659 unsigned int j;
4660
4661 /* For a phi node, assign all the arguments to
4662 the result. */
4663 get_constraint_for (gimple_phi_result (t), &lhsc);
4664 for (i = 0; i < gimple_phi_num_args (t); i++)
4665 {
4666 tree strippedrhs = PHI_ARG_DEF (t, i);
4667
4668 STRIP_NOPS (strippedrhs);
4669 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4670
4671 FOR_EACH_VEC_ELT (lhsc, j, c)
4672 {
4673 struct constraint_expr *c2;
4674 while (rhsc.length () > 0)
4675 {
4676 c2 = &rhsc.last ();
4677 process_constraint (new_constraint (*c, *c2));
4678 rhsc.pop ();
4679 }
4680 }
4681 }
4682 }
4683 /* In IPA mode, we need to generate constraints to pass call
4684 arguments through their calls. There are two cases,
4685 either a GIMPLE_CALL returning a value, or just a plain
4686 GIMPLE_CALL when it does not.
4687
4688 In non-IPA mode, we need to generate constraints for each
4689 pointer passed by address. */
4690 else if (is_gimple_call (t))
4691 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4692
4693 /* Otherwise, just a regular assignment statement. Only care about
4694 operations with pointer result, others are dealt with as escape
4695 points if they have pointer operands. */
4696 else if (is_gimple_assign (t))
4697 {
4699 tree lhsop = gimple_assign_lhs (t);
4700 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4701
4702 if (rhsop && TREE_CLOBBER_P (rhsop))
4703 /* Ignore clobbers, they don't actually store anything into
4704 the LHS. */
4705 ;
4706 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4707 do_structure_copy (lhsop, rhsop);
4708 else
4709 {
4710 enum tree_code code = gimple_assign_rhs_code (t);
4711
4712 get_constraint_for (lhsop, &lhsc);
4713
4714 if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
4715 /* If the operation produces a floating point result then
4716 assume the value is not used to transfer a pointer. */
4717 ;
4718 else if (code == POINTER_PLUS_EXPR)
4719 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4720 gimple_assign_rhs2 (t), &rhsc);
4721 else if (code == BIT_AND_EXPR
4722 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4723 {
4724 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4725 the pointer. Handle it by offsetting it by UNKNOWN. */
4726 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4727 NULL_TREE, &rhsc);
4728 }
4729 else if ((CONVERT_EXPR_CODE_P (code)
4730 && !(POINTER_TYPE_P (gimple_expr_type (t))
4731 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4732 || gimple_assign_single_p (t))
4733 get_constraint_for_rhs (rhsop, &rhsc);
4734 else if (code == COND_EXPR)
4735 {
4736 /* The result is a merge of both COND_EXPR arms. */
4737 auto_vec<ce_s, 2> tmp;
4738 struct constraint_expr *rhsp;
4739 unsigned i;
4740 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4741 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4742 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4743 rhsc.safe_push (*rhsp);
4744 }
4745 else if (truth_value_p (code))
4746 /* Truth value results are not pointer (parts), or at least
4747 would be a very, very unreasonable obfuscation of one. */
4748 ;
4749 else
4750 {
4751 /* All other operations are merges. */
4752 auto_vec<ce_s, 4> tmp;
4753 struct constraint_expr *rhsp;
4754 unsigned i, j;
4755 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4756 for (i = 2; i < gimple_num_ops (t); ++i)
4757 {
4758 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4759 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4760 rhsc.safe_push (*rhsp);
4761 tmp.truncate (0);
4762 }
4763 }
4764 process_all_all_constraints (lhsc, rhsc);
4765 }
4766 /* If there is a store to a global variable the rhs escapes. */
4767 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4768 && DECL_P (lhsop)
4769 && is_global_var (lhsop)
4770 && (!in_ipa_mode
4771 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4772 make_escape_constraint (rhsop);
4773 }
4774 /* Handle escapes through return. */
4775 else if (gimple_code (t) == GIMPLE_RETURN
4776 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4777 {
4778 greturn *return_stmt = as_a <greturn *> (t);
4779 fi = NULL;
4780 if (!in_ipa_mode
4781 || !(fi = get_vi_for_tree (fn->decl)))
4782 make_escape_constraint (gimple_return_retval (return_stmt));
4783 else if (in_ipa_mode
4784 && fi != NULL)
4785 {
4786 struct constraint_expr lhs;
4787 struct constraint_expr *rhsp;
4788 unsigned i;
4789
4790 lhs = get_function_part_constraint (fi, fi_result);
4791 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4792 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4793 process_constraint (new_constraint (lhs, *rhsp));
4794 }
4795 }
4796 /* Handle asms conservatively by adding escape constraints to everything. */
4797 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4798 {
4799 unsigned i, noutputs;
4800 const char **oconstraints;
4801 const char *constraint;
4802 bool allows_mem, allows_reg, is_inout;
4803
4804 noutputs = gimple_asm_noutputs (asm_stmt);
4805 oconstraints = XALLOCAVEC (const char *, noutputs);
4806
4807 for (i = 0; i < noutputs; ++i)
4808 {
4809 tree link = gimple_asm_output_op (asm_stmt, i);
4810 tree op = TREE_VALUE (link);
4811
4812 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4813 oconstraints[i] = constraint;
4814 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4815 &allows_reg, &is_inout);
4816
4817 /* A memory constraint makes the address of the operand escape. */
4818 if (!allows_reg && allows_mem)
4819 make_escape_constraint (build_fold_addr_expr (op));
4820
4821 /* The asm may read global memory, so outputs may point to
4822 any global memory. */
4823 if (op)
4824 {
4825 auto_vec<ce_s, 2> lhsc;
4826 struct constraint_expr rhsc, *lhsp;
4827 unsigned j;
4828 get_constraint_for (op, &lhsc);
4829 rhsc.var = nonlocal_id;
4830 rhsc.offset = 0;
4831 rhsc.type = SCALAR;
4832 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4833 process_constraint (new_constraint (*lhsp, rhsc));
4834 }
4835 }
4836 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4837 {
4838 tree link = gimple_asm_input_op (asm_stmt, i);
4839 tree op = TREE_VALUE (link);
4840
4841 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4842
4843 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4844 &allows_mem, &allows_reg);
4845
4846 /* A memory constraint makes the address of the operand escape. */
4847 if (!allows_reg && allows_mem)
4848 make_escape_constraint (build_fold_addr_expr (op));
4849 /* Strictly we'd only need the constraint to ESCAPED if
4850 the asm clobbers memory, otherwise using something
4851 along the lines of per-call clobbers/uses would be enough. */
4852 else if (op)
4853 make_escape_constraint (op);
4854 }
4855 }
4856 }
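
/* Hedged example of the PHI handling above: for

     # x_3 = PHI <y_1, z_2>

   each argument is assigned to the result, yielding the copy
   constraints x_3 = y_1 and x_3 = z_2, so x_3's solution is the
   union of both arms.  */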
4857
4858
4859 /* Create a constraint adding to the clobber set of FI the memory
4860 pointed to by PTR. */
4861
4862 static void
4863 process_ipa_clobber (varinfo_t fi, tree ptr)
4864 {
4865 vec<ce_s> ptrc = vNULL;
4866 struct constraint_expr *c, lhs;
4867 unsigned i;
4868 get_constraint_for_rhs (ptr, &ptrc);
4869 lhs = get_function_part_constraint (fi, fi_clobbers);
4870 FOR_EACH_VEC_ELT (ptrc, i, c)
4871 process_constraint (new_constraint (lhs, *c));
4872 ptrc.release ();
4873 }
4874
4875 /* Walk statement T setting up clobber and use constraints according to the
4876 references found in T. This function is a main part of the
4877 IPA constraint builder. */
4878
4879 static void
4880 find_func_clobbers (struct function *fn, gimple origt)
4881 {
4882 gimple t = origt;
4883 auto_vec<ce_s, 16> lhsc;
4884 auto_vec<ce_s, 16> rhsc;
4885 varinfo_t fi;
4886
4887 /* Add constraints for clobbered/used in IPA mode.
4888 We are not interested in what automatic variables are clobbered
4889 or used, as we only use the information in the caller, to which
4890 they do not escape. */
4891 gcc_assert (in_ipa_mode);
4892
4893 /* If the stmt refers to memory in any way it had better have a VUSE. */
4894 if (gimple_vuse (t) == NULL_TREE)
4895 return;
4896
4897 /* We'd better have function information for the current function. */
4898 fi = lookup_vi_for_tree (fn->decl);
4899 gcc_assert (fi != NULL);
4900
4901 /* Account for stores in assignments and calls. */
4902 if (gimple_vdef (t) != NULL_TREE
4903 && gimple_has_lhs (t))
4904 {
4905 tree lhs = gimple_get_lhs (t);
4906 tree tem = lhs;
4907 while (handled_component_p (tem))
4908 tem = TREE_OPERAND (tem, 0);
4909 if ((DECL_P (tem)
4910 && !auto_var_in_fn_p (tem, fn->decl))
4911 || INDIRECT_REF_P (tem)
4912 || (TREE_CODE (tem) == MEM_REF
4913 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4914 && auto_var_in_fn_p
4915 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4916 {
4917 struct constraint_expr lhsc, *rhsp;
4918 unsigned i;
4919 lhsc = get_function_part_constraint (fi, fi_clobbers);
4920 get_constraint_for_address_of (lhs, &rhsc);
4921 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4922 process_constraint (new_constraint (lhsc, *rhsp));
4923 rhsc.truncate (0);
4924 }
4925 }
4926
4927 /* Account for uses in assignments and returns. */
4928 if (gimple_assign_single_p (t)
4929 || (gimple_code (t) == GIMPLE_RETURN
4930 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
4931 {
4932 tree rhs = (gimple_assign_single_p (t)
4933 ? gimple_assign_rhs1 (t)
4934 : gimple_return_retval (as_a <greturn *> (t)));
4935 tree tem = rhs;
4936 while (handled_component_p (tem))
4937 tem = TREE_OPERAND (tem, 0);
4938 if ((DECL_P (tem)
4939 && !auto_var_in_fn_p (tem, fn->decl))
4940 || INDIRECT_REF_P (tem)
4941 || (TREE_CODE (tem) == MEM_REF
4942 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4943 && auto_var_in_fn_p
4944 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4945 {
4946 struct constraint_expr lhs, *rhsp;
4947 unsigned i;
4948 lhs = get_function_part_constraint (fi, fi_uses);
4949 get_constraint_for_address_of (rhs, &rhsc);
4950 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4951 process_constraint (new_constraint (lhs, *rhsp));
4952 rhsc.truncate (0);
4953 }
4954 }
4955
4956 if (gcall *call_stmt = dyn_cast <gcall *> (t))
4957 {
4958 varinfo_t cfi = NULL;
4959 tree decl = gimple_call_fndecl (t);
4960 struct constraint_expr lhs, rhs;
4961 unsigned i, j;
4962
4963 /* For builtins we do not have separate function info. For those
4964 we do not generate escapes for, we have to generate clobbers/uses. */
4965 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4966 switch (DECL_FUNCTION_CODE (decl))
4967 {
4968 /* The following functions use and clobber memory pointed to
4969 by their arguments. */
4970 case BUILT_IN_STRCPY:
4971 case BUILT_IN_STRNCPY:
4972 case BUILT_IN_BCOPY:
4973 case BUILT_IN_MEMCPY:
4974 case BUILT_IN_MEMMOVE:
4975 case BUILT_IN_MEMPCPY:
4976 case BUILT_IN_STPCPY:
4977 case BUILT_IN_STPNCPY:
4978 case BUILT_IN_STRCAT:
4979 case BUILT_IN_STRNCAT:
4980 case BUILT_IN_STRCPY_CHK:
4981 case BUILT_IN_STRNCPY_CHK:
4982 case BUILT_IN_MEMCPY_CHK:
4983 case BUILT_IN_MEMMOVE_CHK:
4984 case BUILT_IN_MEMPCPY_CHK:
4985 case BUILT_IN_STPCPY_CHK:
4986 case BUILT_IN_STPNCPY_CHK:
4987 case BUILT_IN_STRCAT_CHK:
4988 case BUILT_IN_STRNCAT_CHK:
4989 {
4990 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4991 == BUILT_IN_BCOPY ? 1 : 0));
4992 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4993 == BUILT_IN_BCOPY ? 0 : 1));
4994 unsigned i;
4995 struct constraint_expr *rhsp, *lhsp;
4996 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4997 lhs = get_function_part_constraint (fi, fi_clobbers);
4998 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4999 process_constraint (new_constraint (lhs, *lhsp));
5000 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5001 lhs = get_function_part_constraint (fi, fi_uses);
5002 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5003 process_constraint (new_constraint (lhs, *rhsp));
5004 return;
5005 }
5006 /* The following functions clobber memory pointed to by
5007 their first argument. */
5008 case BUILT_IN_MEMSET:
5009 case BUILT_IN_MEMSET_CHK:
5010 case BUILT_IN_POSIX_MEMALIGN:
5011 {
5012 tree dest = gimple_call_arg (t, 0);
5013 unsigned i;
5014 ce_s *lhsp;
5015 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5016 lhs = get_function_part_constraint (fi, fi_clobbers);
5017 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5018 process_constraint (new_constraint (lhs, *lhsp));
5019 return;
5020 }
5021 /* The following functions clobber their second and third
5022 arguments. */
5023 case BUILT_IN_SINCOS:
5024 case BUILT_IN_SINCOSF:
5025 case BUILT_IN_SINCOSL:
5026 {
5027 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5028 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5029 return;
5030 }
5031 /* The following functions clobber their second argument. */
5032 case BUILT_IN_FREXP:
5033 case BUILT_IN_FREXPF:
5034 case BUILT_IN_FREXPL:
5035 case BUILT_IN_LGAMMA_R:
5036 case BUILT_IN_LGAMMAF_R:
5037 case BUILT_IN_LGAMMAL_R:
5038 case BUILT_IN_GAMMA_R:
5039 case BUILT_IN_GAMMAF_R:
5040 case BUILT_IN_GAMMAL_R:
5041 case BUILT_IN_MODF:
5042 case BUILT_IN_MODFF:
5043 case BUILT_IN_MODFL:
5044 {
5045 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5046 return;
5047 }
5048 /* The following functions clobber their third argument. */
5049 case BUILT_IN_REMQUO:
5050 case BUILT_IN_REMQUOF:
5051 case BUILT_IN_REMQUOL:
5052 {
5053 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5054 return;
5055 }
5056 /* The following functions neither read nor clobber memory. */
5057 case BUILT_IN_ASSUME_ALIGNED:
5058 case BUILT_IN_FREE:
5059 return;
5060 /* Trampolines are of no interest to us. */
5061 case BUILT_IN_INIT_TRAMPOLINE:
5062 case BUILT_IN_ADJUST_TRAMPOLINE:
5063 return;
5064 case BUILT_IN_VA_START:
5065 case BUILT_IN_VA_END:
5066 return;
5067 /* printf-style functions may have hooks to set pointers to
5068 point to somewhere into the generated string. Leave them
5069 for a later exercise... */
5070 default:
5071 /* Fallthru to general call handling. */;
5072 }
5073
5074 /* Parameters passed by value are used. */
5075 lhs = get_function_part_constraint (fi, fi_uses);
5076 for (i = 0; i < gimple_call_num_args (t); i++)
5077 {
5078 struct constraint_expr *rhsp;
5079 tree arg = gimple_call_arg (t, i);
5080
5081 if (TREE_CODE (arg) == SSA_NAME
5082 || is_gimple_min_invariant (arg))
5083 continue;
5084
5085 get_constraint_for_address_of (arg, &rhsc);
5086 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5087 process_constraint (new_constraint (lhs, *rhsp));
5088 rhsc.truncate (0);
5089 }
5090
5091 /* Build constraints for propagating clobbers/uses along the
5092 callgraph edges. */
5093 cfi = get_fi_for_callee (call_stmt);
5094 if (cfi->id == anything_id)
5095 {
5096 if (gimple_vdef (t))
5097 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5098 anything_id);
5099 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5100 anything_id);
5101 return;
5102 }
5103
5104 /* For callees without function info (that's external functions),
5105 ESCAPED is clobbered and used. */
5106 if (gimple_call_fndecl (t)
5107 && !cfi->is_fn_info)
5108 {
5109 varinfo_t vi;
5110
5111 if (gimple_vdef (t))
5112 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5113 escaped_id);
5114 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5115
5116 /* Also honor the call statement use/clobber info. */
5117 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5118 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5119 vi->id);
5120 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5121 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5122 vi->id);
5123 return;
5124 }
5125
5126 /* Otherwise the caller clobbers and uses what the callee does.
5127 ??? This should use a new complex constraint that filters
5128 local variables of the callee. */
5129 if (gimple_vdef (t))
5130 {
5131 lhs = get_function_part_constraint (fi, fi_clobbers);
5132 rhs = get_function_part_constraint (cfi, fi_clobbers);
5133 process_constraint (new_constraint (lhs, rhs));
5134 }
5135 lhs = get_function_part_constraint (fi, fi_uses);
5136 rhs = get_function_part_constraint (cfi, fi_uses);
5137 process_constraint (new_constraint (lhs, rhs));
5138 }
5139 else if (gimple_code (t) == GIMPLE_ASM)
5140 {
5141 /* ??? Ick. We can do better. */
5142 if (gimple_vdef (t))
5143 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5144 anything_id);
5145 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5146 anything_id);
5147 }
5148 }
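
/* Hedged example of the IPA clobber/use bookkeeping above: inside a
   function foo, a statement

     *p = q;

   where the destination is not provably a local automatic adds the
   address of the store destination to foo's fi_clobbers part,
   conceptually foo.clobber = &(*p), while a corresponding read on the
   rhs would feed foo.use instead.  */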
5149
5150
5151 /* Find the first varinfo in the same variable as START that overlaps with
5152 OFFSET. Return NULL if we can't find one. */
5153
5154 static varinfo_t
5155 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5156 {
5157 /* If the offset is outside of the variable, bail out. */
5158 if (offset >= start->fullsize)
5159 return NULL;
5160
5161 /* If we cannot reach offset from start, lookup the first field
5162 and start from there. */
5163 if (start->offset > offset)
5164 start = get_varinfo (start->head);
5165
5166 while (start)
5167 {
5168 /* We may not find a variable in the field list with the actual
5169 offset when we have glommed a structure to a variable.
5170 In that case, however, offset should still be within the size
5171 of the variable. */
5172 if (offset >= start->offset
5173 && (offset - start->offset) < start->size)
5174 return start;
5175
5176 start = vi_next (start);
5177 }
5178
5179 return NULL;
5180 }
5181
5182 /* Find the first varinfo in the same variable as START that overlaps with
5183 OFFSET. If there is no such varinfo the varinfo directly preceding
5184 OFFSET is returned. */
5185
5186 static varinfo_t
5187 first_or_preceding_vi_for_offset (varinfo_t start,
5188 unsigned HOST_WIDE_INT offset)
5189 {
5190 /* If we cannot reach offset from start, lookup the first field
5191 and start from there. */
5192 if (start->offset > offset)
5193 start = get_varinfo (start->head);
5194
5195 /* We may not find a variable in the field list with the actual
5196 offset when we have glommed a structure to a variable.
5197 In that case, however, offset should still be within the size
5198 of the variable.
5199 If we got beyond the offset we look for, return the field
5200 directly preceding offset, which may be the last field. */
5201 while (start->next
5202 && offset >= start->offset
5203 && !((offset - start->offset) < start->size))
5204 start = vi_next (start);
5205
5206 return start;
5207 }
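
/* A sketch, assuming 64-bit pointers and a field-sensitive varinfo for

     struct S { int *a; int *b; } s;

   with subvariables at bit offsets 0 and 64: first_vi_for_offset (s, 64)
   returns the subvariable for b and returns NULL for an offset at or
   beyond the full size, while first_or_preceding_vi_for_offset falls
   back to the closest preceding subvariable in that case.  */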
5208
5209
5210 /* This structure is used during pushing fields onto the fieldstack
5211 to track the offset of the field, since bitpos_of_field gives it
5212 relative to its immediate containing type, and we want it relative
5213 to the ultimate containing object. */
5214
5215 struct fieldoff
5216 {
5217 /* Offset from the base of the base containing object to this field. */
5218 HOST_WIDE_INT offset;
5219
5220 /* Size, in bits, of the field. */
5221 unsigned HOST_WIDE_INT size;
5222
5223 unsigned has_unknown_size : 1;
5224
5225 unsigned must_have_pointers : 1;
5226
5227 unsigned may_have_pointers : 1;
5228
5229 unsigned only_restrict_pointers : 1;
5230 };
5231 typedef struct fieldoff fieldoff_s;
5232
5233
5234 /* qsort comparison function for two fieldoff's PA and PB */
5235
5236 static int
5237 fieldoff_compare (const void *pa, const void *pb)
5238 {
5239 const fieldoff_s *foa = (const fieldoff_s *)pa;
5240 const fieldoff_s *fob = (const fieldoff_s *)pb;
5241 unsigned HOST_WIDE_INT foasize, fobsize;
5242
5243 if (foa->offset < fob->offset)
5244 return -1;
5245 else if (foa->offset > fob->offset)
5246 return 1;
5247
5248 foasize = foa->size;
5249 fobsize = fob->size;
5250 if (foasize < fobsize)
5251 return -1;
5252 else if (foasize > fobsize)
5253 return 1;
5254 return 0;
5255 }
5256
5257 /* Sort a fieldstack according to the field offset and sizes. */
5258 static void
5259 sort_fieldstack (vec<fieldoff_s> fieldstack)
5260 {
5261 fieldstack.qsort (fieldoff_compare);
5262 }
5263
5264 /* Return true if T is a type that can have subvars. */
5265
5266 static inline bool
5267 type_can_have_subvars (const_tree t)
5268 {
5269 /* Aggregates without overlapping fields can have subvars. */
5270 return TREE_CODE (t) == RECORD_TYPE;
5271 }
5272
5273 /* Return true if V is a tree that we can have subvars for.
5274 Normally, this is any aggregate type. Also complex
5275 types which are not gimple registers can have subvars. */
5276
5277 static inline bool
5278 var_can_have_subvars (const_tree v)
5279 {
5280 /* Volatile variables should never have subvars. */
5281 if (TREE_THIS_VOLATILE (v))
5282 return false;
5283
5284 /* Non decls or memory tags can never have subvars. */
5285 if (!DECL_P (v))
5286 return false;
5287
5288 return type_can_have_subvars (TREE_TYPE (v));
5289 }
5290
5291 /* Return true if T is a type that must contain pointers. */
5292
5293 static bool
5294 type_must_have_pointers (tree type)
5295 {
5296 if (POINTER_TYPE_P (type))
5297 return true;
5298
5299 if (TREE_CODE (type) == ARRAY_TYPE)
5300 return type_must_have_pointers (TREE_TYPE (type));
5301
5302 /* A function or method can have pointers as arguments, so track
5303 those separately. */
5304 if (TREE_CODE (type) == FUNCTION_TYPE
5305 || TREE_CODE (type) == METHOD_TYPE)
5306 return true;
5307
5308 return false;
5309 }
5310
5311 static bool
5312 field_must_have_pointers (tree t)
5313 {
5314 return type_must_have_pointers (TREE_TYPE (t));
5315 }
5316
5317 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5318 the fields of TYPE onto fieldstack, recording their offsets along
5319 the way.
5320
5321 OFFSET is used to keep track of the offset in this entire
5322 structure, rather than just the immediately containing structure.
5323 Returns false if the caller is supposed to handle the field we
5324 recursed for. */
5325
5326 static bool
5327 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5328 HOST_WIDE_INT offset)
5329 {
5330 tree field;
5331 bool empty_p = true;
5332
5333 if (TREE_CODE (type) != RECORD_TYPE)
5334 return false;
5335
5336 /* If the vector of fields is growing too big, bail out early.
5337 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
5338 make sure that check fails. */
5339 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5340 return false;
5341
5342 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5343 if (TREE_CODE (field) == FIELD_DECL)
5344 {
5345 bool push = false;
5346 HOST_WIDE_INT foff = bitpos_of_field (field);
5347
5348 if (!var_can_have_subvars (field)
5349 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5350 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5351 push = true;
5352 else if (!push_fields_onto_fieldstack
5353 (TREE_TYPE (field), fieldstack, offset + foff)
5354 && (DECL_SIZE (field)
5355 && !integer_zerop (DECL_SIZE (field))))
5356 /* Empty structures may have actual size, like in C++. So
5357 if we didn't push any subfields and the size is
5358 nonzero, push the field itself onto the stack. */
5359 push = true;
5360
5361 if (push)
5362 {
5363 fieldoff_s *pair = NULL;
5364 bool has_unknown_size = false;
5365 bool must_have_pointers_p;
5366
5367 if (!fieldstack->is_empty ())
5368 pair = &fieldstack->last ();
5369
5370 /* If there isn't anything at offset zero, create something. */
5371 if (!pair
5372 && offset + foff != 0)
5373 {
5374 fieldoff_s e = {0, offset + foff, false, false, false, false};
5375 pair = fieldstack->safe_push (e);
5376 }
5377
5378 if (!DECL_SIZE (field)
5379 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5380 has_unknown_size = true;
5381
5382 /* If adjacent fields do not contain pointers merge them. */
5383 must_have_pointers_p = field_must_have_pointers (field);
5384 if (pair
5385 && !has_unknown_size
5386 && !must_have_pointers_p
5387 && !pair->must_have_pointers
5388 && !pair->has_unknown_size
5389 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5390 {
5391 pair->size += tree_to_uhwi (DECL_SIZE (field));
5392 }
5393 else
5394 {
5395 fieldoff_s e;
5396 e.offset = offset + foff;
5397 e.has_unknown_size = has_unknown_size;
5398 if (!has_unknown_size)
5399 e.size = tree_to_uhwi (DECL_SIZE (field));
5400 else
5401 e.size = -1;
5402 e.must_have_pointers = must_have_pointers_p;
5403 e.may_have_pointers = true;
5404 e.only_restrict_pointers
5405 = (!has_unknown_size
5406 && POINTER_TYPE_P (TREE_TYPE (field))
5407 && TYPE_RESTRICT (TREE_TYPE (field)));
5408 fieldstack->safe_push (e);
5409 }
5410 }
5411
5412 empty_p = false;
5413 }
5414
5415 return !empty_p;
5416 }
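
/* Hedged example of the flattening above, assuming 64-bit pointers:

     struct Outer { struct Inner { char *p; } in; char *q; };

   recursing into Inner pushes p at bit offset 0, then q is pushed at
   bit offset 64; since both fields contain pointers they are not
   merged, so the fieldstack ends up with two entries.  */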
5417
5418 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5419 if it is a varargs function. */
5420
5421 static unsigned int
5422 count_num_arguments (tree decl, bool *is_varargs)
5423 {
5424 unsigned int num = 0;
5425 tree t;
5426
5427 /* Capture named arguments for K&R functions. They do not
5428 have a prototype and thus no TYPE_ARG_TYPES. */
5429 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5430 ++num;
5431
5432 /* Check if the function has variadic arguments. */
5433 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5434 if (TREE_VALUE (t) == void_type_node)
5435 break;
5436 if (!t)
5437 *is_varargs = true;
5438
5439 return num;
5440 }
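
/* Sketch: for

     int f (int a, double b, ...);

   the DECL_ARGUMENTS walk counts the two named parameters, and the
   TYPE_ARG_TYPES list does not terminate in void_type_node, so the
   result is 2 with *IS_VARARGS set to true.  */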
5441
5442 /* Create the function info node for DECL, using NAME, and return
5443 the varinfo we've created for the function. */
5444
5445 static varinfo_t
5446 create_function_info_for (tree decl, const char *name)
5447 {
5448 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5449 varinfo_t vi, prev_vi;
5450 tree arg;
5451 unsigned int i;
5452 bool is_varargs = false;
5453 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5454
5455 /* Create the variable info. */
5456
5457 vi = new_var_info (decl, name);
5458 vi->offset = 0;
5459 vi->size = 1;
5460 vi->fullsize = fi_parm_base + num_args;
5461 vi->is_fn_info = 1;
5462 vi->may_have_pointers = false;
5463 if (is_varargs)
5464 vi->fullsize = ~0;
5465 insert_vi_for_tree (vi->decl, vi);
5466
5467 prev_vi = vi;
5468
5469 /* Create a variable for things the function clobbers and one for
5470 things the function uses. */
5471 {
5472 varinfo_t clobbervi, usevi;
5473 const char *newname;
5474 char *tempname;
5475
5476 tempname = xasprintf ("%s.clobber", name);
5477 newname = ggc_strdup (tempname);
5478 free (tempname);
5479
5480 clobbervi = new_var_info (NULL, newname);
5481 clobbervi->offset = fi_clobbers;
5482 clobbervi->size = 1;
5483 clobbervi->fullsize = vi->fullsize;
5484 clobbervi->is_full_var = true;
5485 clobbervi->is_global_var = false;
5486 gcc_assert (prev_vi->offset < clobbervi->offset);
5487 prev_vi->next = clobbervi->id;
5488 prev_vi = clobbervi;
5489
5490 tempname = xasprintf ("%s.use", name);
5491 newname = ggc_strdup (tempname);
5492 free (tempname);
5493
5494 usevi = new_var_info (NULL, newname);
5495 usevi->offset = fi_uses;
5496 usevi->size = 1;
5497 usevi->fullsize = vi->fullsize;
5498 usevi->is_full_var = true;
5499 usevi->is_global_var = false;
5500 gcc_assert (prev_vi->offset < usevi->offset);
5501 prev_vi->next = usevi->id;
5502 prev_vi = usevi;
5503 }
5504
5505 /* And one for the static chain. */
5506 if (fn->static_chain_decl != NULL_TREE)
5507 {
5508 varinfo_t chainvi;
5509 const char *newname;
5510 char *tempname;
5511
5512 tempname = xasprintf ("%s.chain", name);
5513 newname = ggc_strdup (tempname);
5514 free (tempname);
5515
5516 chainvi = new_var_info (fn->static_chain_decl, newname);
5517 chainvi->offset = fi_static_chain;
5518 chainvi->size = 1;
5519 chainvi->fullsize = vi->fullsize;
5520 chainvi->is_full_var = true;
5521 chainvi->is_global_var = false;
5522 gcc_assert (prev_vi->offset < chainvi->offset);
5523 prev_vi->next = chainvi->id;
5524 prev_vi = chainvi;
5525 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5526 }
5527
5528 /* Create a variable for the return var. */
5529 if (DECL_RESULT (decl) != NULL
5530 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5531 {
5532 varinfo_t resultvi;
5533 const char *newname;
5534 char *tempname;
5535 tree resultdecl = decl;
5536
5537 if (DECL_RESULT (decl))
5538 resultdecl = DECL_RESULT (decl);
5539
5540 tempname = xasprintf ("%s.result", name);
5541 newname = ggc_strdup (tempname);
5542 free (tempname);
5543
5544 resultvi = new_var_info (resultdecl, newname);
5545 resultvi->offset = fi_result;
5546 resultvi->size = 1;
5547 resultvi->fullsize = vi->fullsize;
5548 resultvi->is_full_var = true;
5549 if (DECL_RESULT (decl))
5550 resultvi->may_have_pointers = true;
5551 gcc_assert (prev_vi->offset < resultvi->offset);
5552 prev_vi->next = resultvi->id;
5553 prev_vi = resultvi;
5554 if (DECL_RESULT (decl))
5555 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5556 }
5557
5558 /* Set up variables for each argument. */
5559 arg = DECL_ARGUMENTS (decl);
5560 for (i = 0; i < num_args; i++)
5561 {
5562 varinfo_t argvi;
5563 const char *newname;
5564 char *tempname;
5565 tree argdecl = decl;
5566
5567 if (arg)
5568 argdecl = arg;
5569
5570 tempname = xasprintf ("%s.arg%d", name, i);
5571 newname = ggc_strdup (tempname);
5572 free (tempname);
5573
5574 argvi = new_var_info (argdecl, newname);
5575 argvi->offset = fi_parm_base + i;
5576 argvi->size = 1;
5577 argvi->is_full_var = true;
5578 argvi->fullsize = vi->fullsize;
5579 if (arg)
5580 argvi->may_have_pointers = true;
5581 gcc_assert (prev_vi->offset < argvi->offset);
5582 prev_vi->next = argvi->id;
5583 prev_vi = argvi;
5584 if (arg)
5585 {
5586 insert_vi_for_tree (arg, argvi);
5587 arg = DECL_CHAIN (arg);
5588 }
5589 }
5590
5591 /* Add one representative for all further args. */
5592 if (is_varargs)
5593 {
5594 varinfo_t argvi;
5595 const char *newname;
5596 char *tempname;
5597 tree decl;
5598
5599 tempname = xasprintf ("%s.varargs", name);
5600 newname = ggc_strdup (tempname);
5601 free (tempname);
5602
5603 /* We need something that can be pointed to for va_start. */
5604 decl = build_fake_var_decl (ptr_type_node);
5605
5606 argvi = new_var_info (decl, newname);
5607 argvi->offset = fi_parm_base + num_args;
5608 argvi->size = ~0;
5609 argvi->is_full_var = true;
5610 argvi->is_heap_var = true;
5611 argvi->fullsize = vi->fullsize;
5612 gcc_assert (prev_vi->offset < argvi->offset);
5613 prev_vi->next = argvi->id;
5614 prev_vi = argvi;
5615 }
5616
5617 return vi;
5618 }
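
/* Hedged sketch of the resulting layout for  int *foo (int *x, int *y):
   the function varinfo is followed, at increasing offsets, by
   foo.clobber (fi_clobbers = 1), foo.use (fi_uses = 2), foo.result
   (fi_result = 4) and foo.arg0/foo.arg1 starting at fi_parm_base = 5;
   a static chain variable at fi_static_chain = 3 only appears for
   nested functions.  */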
5619
5620
5621 /* Return true if FIELDSTACK contains fields that overlap.
5622 FIELDSTACK is assumed to be sorted by offset. */
5623
5624 static bool
5625 check_for_overlaps (vec<fieldoff_s> fieldstack)
5626 {
5627 fieldoff_s *fo = NULL;
5628 unsigned int i;
5629 HOST_WIDE_INT lastoffset = -1;
5630
5631 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5632 {
5633 if (fo->offset == lastoffset)
5634 return true;
5635 lastoffset = fo->offset;
5636 }
5637 return false;
5638 }
5639
5640 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5641 This will also create any varinfo structures necessary for fields
5642 of DECL. */
5643
5644 static varinfo_t
5645 create_variable_info_for_1 (tree decl, const char *name)
5646 {
5647 varinfo_t vi, newvi;
5648 tree decl_type = TREE_TYPE (decl);
5649 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5650 auto_vec<fieldoff_s> fieldstack;
5651 fieldoff_s *fo;
5652 unsigned int i;
5653 varpool_node *vnode;
5654
5655 if (!declsize
5656 || !tree_fits_uhwi_p (declsize))
5657 {
5658 vi = new_var_info (decl, name);
5659 vi->offset = 0;
5660 vi->size = ~0;
5661 vi->fullsize = ~0;
5662 vi->is_unknown_size_var = true;
5663 vi->is_full_var = true;
5664 vi->may_have_pointers = true;
5665 return vi;
5666 }
5667
5668 /* Collect field information. */
5669 if (use_field_sensitive
5670 && var_can_have_subvars (decl)
5671 /* ??? Force us to not use subfields for global initializers
5672 in IPA mode. Else we'd have to parse arbitrary initializers. */
5673 && !(in_ipa_mode
5674 && is_global_var (decl)
5675 && (vnode = varpool_node::get (decl))
5676 && vnode->get_constructor ()))
5677 {
5678 fieldoff_s *fo = NULL;
5679 bool notokay = false;
5680 unsigned int i;
5681
5682 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5683
5684 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5685 if (fo->has_unknown_size
5686 || fo->offset < 0)
5687 {
5688 notokay = true;
5689 break;
5690 }
5691
5692 /* We can't sort them if we have a field with a variable sized type,
5693 which will make notokay = true. In that case, we are going to return
5694 without creating varinfos for the fields anyway, so sorting them is a
5695 waste to boot. */
5696 if (!notokay)
5697 {
5698 sort_fieldstack (fieldstack);
5699 /* Due to some C++ FE issues, like PR 22488, we might end up
5700    with what appear to be overlapping fields even though they,
5701    in reality, do not overlap. Until the C++ FE is fixed,
5702    we will simply disable field-sensitivity for these cases. */
5703 notokay = check_for_overlaps (fieldstack);
5704 }
5705
5706 if (notokay)
5707 fieldstack.release ();
5708 }
5709
5710 /* If we didn't end up collecting sub-variables, create a full
5711    variable for the decl. */
5712 if (fieldstack.length () <= 1
5713 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5714 {
5715 vi = new_var_info (decl, name);
5716 vi->offset = 0;
5717 vi->may_have_pointers = true;
5718 vi->fullsize = tree_to_uhwi (declsize);
5719 vi->size = vi->fullsize;
5720 vi->is_full_var = true;
5721 fieldstack.release ();
5722 return vi;
5723 }
5724
5725 vi = new_var_info (decl, name);
5726 vi->fullsize = tree_to_uhwi (declsize);
5727 for (i = 0, newvi = vi;
5728 fieldstack.iterate (i, &fo);
5729 ++i, newvi = vi_next (newvi))
5730 {
5731 const char *newname = "NULL";
5732 char *tempname;
5733
5734 if (dump_file)
5735 {
5736 tempname
5737 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
5738 "+" HOST_WIDE_INT_PRINT_DEC, name,
5739 fo->offset, fo->size);
5740 newname = ggc_strdup (tempname);
5741 free (tempname);
5742 }
5743 newvi->name = newname;
5744 newvi->offset = fo->offset;
5745 newvi->size = fo->size;
5746 newvi->fullsize = vi->fullsize;
5747 newvi->may_have_pointers = fo->may_have_pointers;
5748 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5749 if (i + 1 < fieldstack.length ())
5750 {
5751 varinfo_t tem = new_var_info (decl, name);
5752 newvi->next = tem->id;
5753 tem->head = vi->id;
5754 }
5755 }
5756
5757 return vi;
5758 }
5759
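/* For illustration (a hypothetical declaration, assuming a target
   with 64-bit pointers):

     struct S { int *a; int *b; } s;

   would be split by create_variable_info_for_1 into two sub-variables
   chained off one varinfo, dumped as "s.0+64" and "s.64+64" (offsets
   and sizes are in bits), each with may_have_pointers set and a
   fullsize of 128.  */
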
5760 static unsigned int
5761 create_variable_info_for (tree decl, const char *name)
5762 {
5763 varinfo_t vi = create_variable_info_for_1 (decl, name);
5764 unsigned int id = vi->id;
5765
5766 insert_vi_for_tree (decl, vi);
5767
5768 if (TREE_CODE (decl) != VAR_DECL)
5769 return id;
5770
5771 /* Create initial constraints for globals. */
5772 for (; vi; vi = vi_next (vi))
5773 {
5774 if (!vi->may_have_pointers
5775 || !vi->is_global_var)
5776 continue;
5777
5778 /* Mark global restrict qualified pointers. */
5779 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5780 && TYPE_RESTRICT (TREE_TYPE (decl)))
5781 || vi->only_restrict_pointers)
5782 {
5783 varinfo_t rvi
5784 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5785 /* ??? For now exclude reads from globals as restrict sources
5786 if those are not (indirectly) from incoming parameters. */
5787 rvi->is_restrict_var = false;
5788 continue;
5789 }
5790
5791 /* In non-IPA mode the initializer from nonlocal is all we need. */
5792 if (!in_ipa_mode
5793 || DECL_HARD_REGISTER (decl))
5794 make_copy_constraint (vi, nonlocal_id);
5795
5796 /* In IPA mode parse the initializer and generate proper constraints
5797 for it. */
5798 else
5799 {
5800 varpool_node *vnode = varpool_node::get (decl);
5801
5802 /* For escaped variables initialize them from nonlocal. */
5803 if (!vnode->all_refs_explicit_p ())
5804 make_copy_constraint (vi, nonlocal_id);
5805
5806 /* If this is a global variable with an initializer and we are in
5807 IPA mode generate constraints for it. */
5808 if (vnode->get_constructor ()
5809 && vnode->definition)
5810 {
5811 auto_vec<ce_s> rhsc;
5812 struct constraint_expr lhs, *rhsp;
5813 unsigned i;
5814 get_constraint_for_rhs (vnode->get_constructor (), &rhsc);
5815 lhs.var = vi->id;
5816 lhs.offset = 0;
5817 lhs.type = SCALAR;
5818 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5819 process_constraint (new_constraint (lhs, *rhsp));
5820 /* If this is a variable that escapes from the unit
5821 the initializer escapes as well. */
5822 if (!vnode->all_refs_explicit_p ())
5823 {
5824 lhs.var = escaped_id;
5825 lhs.offset = 0;
5826 lhs.type = SCALAR;
5827 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5828 process_constraint (new_constraint (lhs, *rhsp));
5829 }
5830 }
5831 }
5832 }
5833
5834 return id;
5835 }
5836
5837 /* Print out the points-to solution for VAR to FILE. */
5838
5839 static void
5840 dump_solution_for_var (FILE *file, unsigned int var)
5841 {
5842 varinfo_t vi = get_varinfo (var);
5843 unsigned int i;
5844 bitmap_iterator bi;
5845
5846 /* Dump the solution for unified vars anyway; this avoids difficulties
5847    in scanning dumps in the testsuite. */
5848 fprintf (file, "%s = { ", vi->name);
5849 vi = get_varinfo (find (var));
5850 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5851 fprintf (file, "%s ", get_varinfo (i)->name);
5852 fprintf (file, "}");
5853
5854 /* But note when the variable was unified. */
5855 if (vi->id != var)
5856 fprintf (file, " same as %s", vi->name);
5857
5858 fprintf (file, "\n");
5859 }
5860
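/* For example, if the SSA pointer p_1 was unified with q_2 and the
   representative's solution contains NONLOCAL and a variable a, the
   output of dump_solution_for_var would look roughly like

     p_1 = { NONLOCAL a } same as q_2  */
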
5861 /* Print the points-to solution for VAR to stderr. */
5862
5863 DEBUG_FUNCTION void
5864 debug_solution_for_var (unsigned int var)
5865 {
5866 dump_solution_for_var (stderr, var);
5867 }
5868
5869 /* Create varinfo structures for all of the variables in the
5870 function for intraprocedural mode. */
5871
5872 static void
5873 intra_create_variable_infos (struct function *fn)
5874 {
5875 tree t;
5876
5877 /* For each incoming pointer argument arg, create the constraint ARG
5878    = NONLOCAL, or create a dummy pointed-to variable for it if it is
5879    a restrict qualified passed-by-reference argument. */
5880 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
5881 {
5882 varinfo_t p = get_vi_for_tree (t);
5883
5884 /* For restrict qualified pointers to objects passed by
5885 reference build a real representative for the pointed-to object.
5886 Treat restrict qualified references the same. */
5887 if (TYPE_RESTRICT (TREE_TYPE (t))
5888 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5889 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5890 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5891 {
5892 struct constraint_expr lhsc, rhsc;
5893 varinfo_t vi;
5894 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5895 DECL_EXTERNAL (heapvar) = 1;
5896 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5897 vi->is_restrict_var = 1;
5898 insert_vi_for_tree (heapvar, vi);
5899 lhsc.var = p->id;
5900 lhsc.type = SCALAR;
5901 lhsc.offset = 0;
5902 rhsc.var = vi->id;
5903 rhsc.type = ADDRESSOF;
5904 rhsc.offset = 0;
5905 process_constraint (new_constraint (lhsc, rhsc));
5906 for (; vi; vi = vi_next (vi))
5907 if (vi->may_have_pointers)
5908 {
5909 if (vi->only_restrict_pointers)
5910 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5911 else
5912 make_copy_constraint (vi, nonlocal_id);
5913 }
5914 continue;
5915 }
5916
5917 if (POINTER_TYPE_P (TREE_TYPE (t))
5918 && TYPE_RESTRICT (TREE_TYPE (t)))
5919 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5920 else
5921 {
5922 for (; p; p = vi_next (p))
5923 {
5924 if (p->only_restrict_pointers)
5925 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5926 else if (p->may_have_pointers)
5927 make_constraint_from (p, nonlocal_id);
5928 }
5929 }
5930 }
5931
5932 /* Add a constraint for a result decl that is passed by reference. */
5933 if (DECL_RESULT (fn->decl)
5934 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
5935 {
5936 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
5937
5938 for (p = result_vi; p; p = vi_next (p))
5939 make_constraint_from (p, nonlocal_id);
5940 }
5941
5942 /* Add a constraint for the incoming static chain parameter. */
5943 if (fn->static_chain_decl != NULL_TREE)
5944 {
5945 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
5946
5947 for (p = chain_vi; p; p = vi_next (p))
5948 make_constraint_from (p, nonlocal_id);
5949 }
5950 }
5951
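/* As a sketch of the above, for a hypothetical function

     int f (int * restrict p) { return *p; }

   the parameter p does not simply receive p = NONLOCAL; instead
   make_constraint_from_global_restrict gives it a PARM_RESTRICT
   restrict tag, which is what lets accesses through p disambiguate
   against other memory.  The PARM_NOALIAS case above handles restrict
   qualified arguments passed by reference instead.  */
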
5952 /* Structure used to put solution bitmaps in a hashtable so they can
5953 be shared among variables with the same points-to set. */
5954
5955 typedef struct shared_bitmap_info
5956 {
5957 bitmap pt_vars;
5958 hashval_t hashcode;
5959 } *shared_bitmap_info_t;
5960 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5961
5962 /* Shared_bitmap hashtable helpers. */
5963
5964 struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
5965 {
5966 typedef shared_bitmap_info value_type;
5967 typedef shared_bitmap_info compare_type;
5968 static inline hashval_t hash (const value_type *);
5969 static inline bool equal (const value_type *, const compare_type *);
5970 };
5971
5972 /* Hash function for a shared_bitmap_info_t. */
5973
5974 inline hashval_t
5975 shared_bitmap_hasher::hash (const value_type *bi)
5976 {
5977 return bi->hashcode;
5978 }
5979
5980 /* Equality function for two shared_bitmap_info_t's. */
5981
5982 inline bool
5983 shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
5984 {
5985 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5986 }
5987
5988 /* Shared_bitmap hashtable. */
5989
5990 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
5991
5992 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5993 existing instance if there is one, NULL otherwise. */
5994
5995 static bitmap
5996 shared_bitmap_lookup (bitmap pt_vars)
5997 {
5998 shared_bitmap_info **slot;
5999 struct shared_bitmap_info sbi;
6000
6001 sbi.pt_vars = pt_vars;
6002 sbi.hashcode = bitmap_hash (pt_vars);
6003
6004 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6005 if (!slot)
6006 return NULL;
6007 else
6008 return (*slot)->pt_vars;
6009 }
6010
6011
6012 /* Add a bitmap to the shared bitmap hashtable. */
6013
6014 static void
6015 shared_bitmap_add (bitmap pt_vars)
6016 {
6017 shared_bitmap_info **slot;
6018 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6019
6020 sbi->pt_vars = pt_vars;
6021 sbi->hashcode = bitmap_hash (pt_vars);
6022
6023 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6024 gcc_assert (!*slot);
6025 *slot = sbi;
6026 }
6027
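/* The net effect of the two helpers above is hash-consing: when two
   variables end up with identical points-to sets, only the first
   bitmap is kept and later pt_solutions reference the existing copy,
   as done in find_what_var_points_to below.  */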
6028
6029 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6030
6031 static void
6032 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
6033 {
6034 unsigned int i;
6035 bitmap_iterator bi;
6036 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6037 bool everything_escaped
6038 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6039
6040 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6041 {
6042 varinfo_t vi = get_varinfo (i);
6043
6044 /* The only artificial variables that are allowed in a may-alias
6045 set are heap variables. */
6046 if (vi->is_artificial_var && !vi->is_heap_var)
6047 continue;
6048
6049 if (everything_escaped
6050 || (escaped_vi->solution
6051 && bitmap_bit_p (escaped_vi->solution, i)))
6052 {
6053 pt->vars_contains_escaped = true;
6054 pt->vars_contains_escaped_heap = vi->is_heap_var;
6055 }
6056
6057 if (TREE_CODE (vi->decl) == VAR_DECL
6058 || TREE_CODE (vi->decl) == PARM_DECL
6059 || TREE_CODE (vi->decl) == RESULT_DECL)
6060 {
6061 /* If we are in IPA mode we will not recompute points-to
6062 sets after inlining so make sure they stay valid. */
6063 if (in_ipa_mode
6064 && !DECL_PT_UID_SET_P (vi->decl))
6065 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6066
6067 /* Add the decl to the points-to set. Note that the points-to
6068 set contains global variables. */
6069 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6070 if (vi->is_global_var)
6071 pt->vars_contains_nonlocal = true;
6072 }
6073 }
6074 }
6075
6076
6077 /* Compute and return the points-to solution for the variable ORIG_VI. */
6078
6079 static struct pt_solution
6080 find_what_var_points_to (varinfo_t orig_vi)
6081 {
6082 unsigned int i;
6083 bitmap_iterator bi;
6084 bitmap finished_solution;
6085 bitmap result;
6086 varinfo_t vi;
6087 struct pt_solution *pt;
6088
6089 /* This variable may have been collapsed; let's get the real
6090    variable. */
6091 vi = get_varinfo (find (orig_vi->id));
6092
6093 /* See if we have already computed the solution and return it. */
6094 pt_solution **slot = &final_solutions->get_or_insert (vi);
6095 if (*slot != NULL)
6096 return **slot;
6097
6098 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6099 memset (pt, 0, sizeof (struct pt_solution));
6100
6101 /* Translate artificial variables into SSA_NAME_PTR_INFO
6102 attributes. */
6103 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6104 {
6105 varinfo_t vi = get_varinfo (i);
6106
6107 if (vi->is_artificial_var)
6108 {
6109 if (vi->id == nothing_id)
6110 pt->null = 1;
6111 else if (vi->id == escaped_id)
6112 {
6113 if (in_ipa_mode)
6114 pt->ipa_escaped = 1;
6115 else
6116 pt->escaped = 1;
6117 /* Expand some special vars of ESCAPED in-place here. */
6118 varinfo_t evi = get_varinfo (find (escaped_id));
6119 if (bitmap_bit_p (evi->solution, nonlocal_id))
6120 pt->nonlocal = 1;
6121 }
6122 else if (vi->id == nonlocal_id)
6123 pt->nonlocal = 1;
6124 else if (vi->is_heap_var)
6125 /* We represent heapvars in the points-to set properly. */
6126 ;
6127 else if (vi->id == string_id)
6128 /* Nobody cares - STRING_CSTs are read-only entities. */
6129 ;
6130 else if (vi->id == anything_id
6131 || vi->id == integer_id)
6132 pt->anything = 1;
6133 }
6134 }
6135
6136 /* Instead of doing extra work, simply do not create
6137 elaborate points-to information for pt_anything pointers. */
6138 if (pt->anything)
6139 return *pt;
6140
6141 /* Share the final set of variables when possible. */
6142 finished_solution = BITMAP_GGC_ALLOC ();
6143 stats.points_to_sets_created++;
6144
6145 set_uids_in_ptset (finished_solution, vi->solution, pt);
6146 result = shared_bitmap_lookup (finished_solution);
6147 if (!result)
6148 {
6149 shared_bitmap_add (finished_solution);
6150 pt->vars = finished_solution;
6151 }
6152 else
6153 {
6154 pt->vars = result;
6155 bitmap_clear (finished_solution);
6156 }
6157
6158 return *pt;
6159 }
6160
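/* As a sketch of the translation above: a solution bitmap
   { ESCAPED NONLOCAL a b } yields, in non-IPA mode, a pt_solution with
   pt->escaped and pt->nonlocal set and pt->vars containing only the
   uids of a and b; the artificial variables are represented by flags
   rather than by bits in pt->vars.  */
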
6161 /* Given a pointer variable P, fill in its points-to set. */
6162
6163 static void
6164 find_what_p_points_to (tree p)
6165 {
6166 struct ptr_info_def *pi;
6167 tree lookup_p = p;
6168 varinfo_t vi;
6169
6170 /* For parameters, get at the points-to set for the actual parm
6171 decl. */
6172 if (TREE_CODE (p) == SSA_NAME
6173 && SSA_NAME_IS_DEFAULT_DEF (p)
6174 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6175 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6176 lookup_p = SSA_NAME_VAR (p);
6177
6178 vi = lookup_vi_for_tree (lookup_p);
6179 if (!vi)
6180 return;
6181
6182 pi = get_ptr_info (p);
6183 pi->pt = find_what_var_points_to (vi);
6184 }
6185
6186
6187 /* Query statistics for points-to solutions. */
6188
6189 static struct {
6190 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6191 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6192 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6193 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6194 } pta_stats;
6195
6196 void
6197 dump_pta_stats (FILE *s)
6198 {
6199 fprintf (s, "\nPTA query stats:\n");
6200 fprintf (s, " pt_solution_includes: "
6201 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6202 HOST_WIDE_INT_PRINT_DEC" queries\n",
6203 pta_stats.pt_solution_includes_no_alias,
6204 pta_stats.pt_solution_includes_no_alias
6205 + pta_stats.pt_solution_includes_may_alias);
6206 fprintf (s, " pt_solutions_intersect: "
6207 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6208 HOST_WIDE_INT_PRINT_DEC" queries\n",
6209 pta_stats.pt_solutions_intersect_no_alias,
6210 pta_stats.pt_solutions_intersect_no_alias
6211 + pta_stats.pt_solutions_intersect_may_alias);
6212 }
6213
6214
6215 /* Reset the points-to solution *PT to a conservative default
6216 (point to anything). */
6217
6218 void
6219 pt_solution_reset (struct pt_solution *pt)
6220 {
6221 memset (pt, 0, sizeof (struct pt_solution));
6222 pt->anything = true;
6223 }
6224
6225 /* Set the points-to solution *PT to point only to the variables
6226    in VARS.  VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6227    global variables.  The escaped flags are derived from the
6228    current function's ESCAPED solution. */
6229
6230 void
6231 pt_solution_set (struct pt_solution *pt, bitmap vars,
6232 bool vars_contains_nonlocal)
6233 {
6234 memset (pt, 0, sizeof (struct pt_solution));
6235 pt->vars = vars;
6236 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6237 pt->vars_contains_escaped
6238 = (cfun->gimple_df->escaped.anything
6239 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6240 }
6241
6242 /* Set the points-to solution *PT to point only to the variable VAR. */
6243
6244 void
6245 pt_solution_set_var (struct pt_solution *pt, tree var)
6246 {
6247 memset (pt, 0, sizeof (struct pt_solution));
6248 pt->vars = BITMAP_GGC_ALLOC ();
6249 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6250 pt->vars_contains_nonlocal = is_global_var (var);
6251 pt->vars_contains_escaped
6252 = (cfun->gimple_df->escaped.anything
6253 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6254 }
6255
6256 /* Computes the union of the points-to solutions *DEST and *SRC and
6257 stores the result in *DEST. This changes the points-to bitmap
6258 of *DEST and thus may not be used if that might be shared.
6259 The points-to bitmap of *SRC and *DEST will not be shared after
6260 this function if they were not before. */
6261
6262 static void
6263 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6264 {
6265 dest->anything |= src->anything;
6266 if (dest->anything)
6267 {
6268 pt_solution_reset (dest);
6269 return;
6270 }
6271
6272 dest->nonlocal |= src->nonlocal;
6273 dest->escaped |= src->escaped;
6274 dest->ipa_escaped |= src->ipa_escaped;
6275 dest->null |= src->null;
6276 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6277 dest->vars_contains_escaped |= src->vars_contains_escaped;
6278 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6279 if (!src->vars)
6280 return;
6281
6282 if (!dest->vars)
6283 dest->vars = BITMAP_GGC_ALLOC ();
6284 bitmap_ior_into (dest->vars, src->vars);
6285 }
6286
6287 /* Return true if the points-to solution *PT is empty. */
6288
6289 bool
6290 pt_solution_empty_p (struct pt_solution *pt)
6291 {
6292 if (pt->anything
6293 || pt->nonlocal)
6294 return false;
6295
6296 if (pt->vars
6297 && !bitmap_empty_p (pt->vars))
6298 return false;
6299
6300 /* If the solution includes ESCAPED, check if that is empty. */
6301 if (pt->escaped
6302 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6303 return false;
6304
6305 /* If the solution includes the IPA ESCAPED set, check if that is empty. */
6306 if (pt->ipa_escaped
6307 && !pt_solution_empty_p (&ipa_escaped_pt))
6308 return false;
6309
6310 return true;
6311 }
6312
6313 /* Return true if the points-to solution *PT points only to a single var,
6314    and return that var's uid in *UID. */
6315
6316 bool
6317 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6318 {
6319 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6320 || pt->null || pt->vars == NULL
6321 || !bitmap_single_bit_set_p (pt->vars))
6322 return false;
6323
6324 *uid = bitmap_first_set_bit (pt->vars);
6325 return true;
6326 }
6327
6328 /* Return true if the points-to solution *PT includes global memory. */
6329
6330 bool
6331 pt_solution_includes_global (struct pt_solution *pt)
6332 {
6333 if (pt->anything
6334 || pt->nonlocal
6335 || pt->vars_contains_nonlocal
6336 /* The following is a hack to make the malloc escape hack work.
6337 In reality we'd need different sets for escaped-through-return
6338 and escaped-to-callees and passes would need to be updated. */
6339 || pt->vars_contains_escaped_heap)
6340 return true;
6341
6342 /* 'escaped' is also a placeholder so we have to look into it. */
6343 if (pt->escaped)
6344 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6345
6346 if (pt->ipa_escaped)
6347 return pt_solution_includes_global (&ipa_escaped_pt);
6348
6349 /* ??? This predicate is not correct for the IPA-PTA solution
6350 as we do not properly distinguish between unit escape points
6351 and global variables. */
6352 if (cfun->gimple_df->ipa_pta)
6353 return true;
6354
6355 return false;
6356 }
6357
6358 /* Return true if the points-to solution *PT includes the variable
6359 declaration DECL. */
6360
6361 static bool
6362 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6363 {
6364 if (pt->anything)
6365 return true;
6366
6367 if (pt->nonlocal
6368 && is_global_var (decl))
6369 return true;
6370
6371 if (pt->vars
6372 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6373 return true;
6374
6375 /* If the solution includes ESCAPED, check it. */
6376 if (pt->escaped
6377 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6378 return true;
6379
6380 /* If the solution includes the IPA ESCAPED set, check it. */
6381 if (pt->ipa_escaped
6382 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6383 return true;
6384
6385 return false;
6386 }
6387
6388 bool
6389 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6390 {
6391 bool res = pt_solution_includes_1 (pt, decl);
6392 if (res)
6393 ++pta_stats.pt_solution_includes_may_alias;
6394 else
6395 ++pta_stats.pt_solution_includes_no_alias;
6396 return res;
6397 }
6398
6399 /* Return true if the points-to solutions PT1 and PT2 have a non-empty
6400    intersection. */
6401
6402 static bool
6403 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6404 {
6405 if (pt1->anything || pt2->anything)
6406 return true;
6407
6408 /* If either points to unknown global memory and the other points to
6409 any global memory they alias. */
6410 if ((pt1->nonlocal
6411 && (pt2->nonlocal
6412 || pt2->vars_contains_nonlocal))
6413 || (pt2->nonlocal
6414 && pt1->vars_contains_nonlocal))
6415 return true;
6416
6417 /* If either points to all escaped memory and the other points to
6418 any escaped memory they alias. */
6419 if ((pt1->escaped
6420 && (pt2->escaped
6421 || pt2->vars_contains_escaped))
6422 || (pt2->escaped
6423 && pt1->vars_contains_escaped))
6424 return true;
6425
6426 /* Check the escaped solution if required.
6427 ??? Do we need to check the local against the IPA escaped sets? */
6428 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6429 && !pt_solution_empty_p (&ipa_escaped_pt))
6430 {
6431 /* If both point to escaped memory and that solution
6432 is not empty they alias. */
6433 if (pt1->ipa_escaped && pt2->ipa_escaped)
6434 return true;
6435
6436 /* If either points to escaped memory see if the escaped solution
6437 intersects with the other. */
6438 if ((pt1->ipa_escaped
6439 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6440 || (pt2->ipa_escaped
6441 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6442 return true;
6443 }
6444
6445 /* Now both pointers alias if their points-to solution intersects. */
6446 return (pt1->vars
6447 && pt2->vars
6448 && bitmap_intersect_p (pt1->vars, pt2->vars));
6449 }
6450
6451 bool
6452 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6453 {
6454 bool res = pt_solutions_intersect_1 (pt1, pt2);
6455 if (res)
6456 ++pta_stats.pt_solutions_intersect_may_alias;
6457 else
6458 ++pta_stats.pt_solutions_intersect_no_alias;
6459 return res;
6460 }
6461
6462
6463 /* Dump points-to information to OUTFILE. */
6464
6465 static void
6466 dump_sa_points_to_info (FILE *outfile)
6467 {
6468 unsigned int i;
6469
6470 fprintf (outfile, "\nPoints-to sets\n\n");
6471
6472 if (dump_flags & TDF_STATS)
6473 {
6474 fprintf (outfile, "Stats:\n");
6475 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6476 fprintf (outfile, "Non-pointer vars: %d\n",
6477 stats.nonpointer_vars);
6478 fprintf (outfile, "Statically unified vars: %d\n",
6479 stats.unified_vars_static);
6480 fprintf (outfile, "Dynamically unified vars: %d\n",
6481 stats.unified_vars_dynamic);
6482 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6483 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6484 fprintf (outfile, "Number of implicit edges: %d\n",
6485 stats.num_implicit_edges);
6486 }
6487
6488 for (i = 1; i < varmap.length (); i++)
6489 {
6490 varinfo_t vi = get_varinfo (i);
6491 if (!vi->may_have_pointers)
6492 continue;
6493 dump_solution_for_var (outfile, i);
6494 }
6495 }
6496
6497
6498 /* Debug points-to information to stderr. */
6499
6500 DEBUG_FUNCTION void
6501 debug_sa_points_to_info (void)
6502 {
6503 dump_sa_points_to_info (stderr);
6504 }
6505
6506
6507 /* Initialize the always-existing constraint variables NULL,
6508    ANYTHING, STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
6509
6510 static void
6511 init_base_vars (void)
6512 {
6513 struct constraint_expr lhs, rhs;
6514 varinfo_t var_anything;
6515 varinfo_t var_nothing;
6516 varinfo_t var_string;
6517 varinfo_t var_escaped;
6518 varinfo_t var_nonlocal;
6519 varinfo_t var_storedanything;
6520 varinfo_t var_integer;
6521
6522 /* Variable ID zero is reserved and should be NULL. */
6523 varmap.safe_push (NULL);
6524
6525 /* Create the NULL variable, used to represent that a variable points
6526 to NULL. */
6527 var_nothing = new_var_info (NULL_TREE, "NULL");
6528 gcc_assert (var_nothing->id == nothing_id);
6529 var_nothing->is_artificial_var = 1;
6530 var_nothing->offset = 0;
6531 var_nothing->size = ~0;
6532 var_nothing->fullsize = ~0;
6533 var_nothing->is_special_var = 1;
6534 var_nothing->may_have_pointers = 0;
6535 var_nothing->is_global_var = 0;
6536
6537 /* Create the ANYTHING variable, used to represent that a variable
6538 points to some unknown piece of memory. */
6539 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6540 gcc_assert (var_anything->id == anything_id);
6541 var_anything->is_artificial_var = 1;
6542 var_anything->size = ~0;
6543 var_anything->offset = 0;
6544 var_anything->fullsize = ~0;
6545 var_anything->is_special_var = 1;
6546
6547 /* Anything points to anything. This makes deref constraints just
6548 work in the presence of linked list and other p = *p type loops,
6549 by saying that *ANYTHING = ANYTHING. */
6550 lhs.type = SCALAR;
6551 lhs.var = anything_id;
6552 lhs.offset = 0;
6553 rhs.type = ADDRESSOF;
6554 rhs.var = anything_id;
6555 rhs.offset = 0;
6556
6557 /* This specifically does not use process_constraint because
6558 process_constraint ignores all anything = anything constraints, since all
6559 but this one are redundant. */
6560 constraints.safe_push (new_constraint (lhs, rhs));
6561
6562 /* Create the STRING variable, used to represent that a variable
6563 points to a string literal. String literals don't contain
6564 pointers so STRING doesn't point to anything. */
6565 var_string = new_var_info (NULL_TREE, "STRING");
6566 gcc_assert (var_string->id == string_id);
6567 var_string->is_artificial_var = 1;
6568 var_string->offset = 0;
6569 var_string->size = ~0;
6570 var_string->fullsize = ~0;
6571 var_string->is_special_var = 1;
6572 var_string->may_have_pointers = 0;
6573
6574 /* Create the ESCAPED variable, used to represent the set of escaped
6575 memory. */
6576 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6577 gcc_assert (var_escaped->id == escaped_id);
6578 var_escaped->is_artificial_var = 1;
6579 var_escaped->offset = 0;
6580 var_escaped->size = ~0;
6581 var_escaped->fullsize = ~0;
6582 var_escaped->is_special_var = 0;
6583
6584 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6585 memory. */
6586 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6587 gcc_assert (var_nonlocal->id == nonlocal_id);
6588 var_nonlocal->is_artificial_var = 1;
6589 var_nonlocal->offset = 0;
6590 var_nonlocal->size = ~0;
6591 var_nonlocal->fullsize = ~0;
6592 var_nonlocal->is_special_var = 1;
6593
6594 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6595 lhs.type = SCALAR;
6596 lhs.var = escaped_id;
6597 lhs.offset = 0;
6598 rhs.type = DEREF;
6599 rhs.var = escaped_id;
6600 rhs.offset = 0;
6601 process_constraint (new_constraint (lhs, rhs));
6602
6603 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6604 whole variable escapes. */
6605 lhs.type = SCALAR;
6606 lhs.var = escaped_id;
6607 lhs.offset = 0;
6608 rhs.type = SCALAR;
6609 rhs.var = escaped_id;
6610 rhs.offset = UNKNOWN_OFFSET;
6611 process_constraint (new_constraint (lhs, rhs));
6612
6613 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6614 everything pointed to by escaped points to what global memory can
6615 point to. */
6616 lhs.type = DEREF;
6617 lhs.var = escaped_id;
6618 lhs.offset = 0;
6619 rhs.type = SCALAR;
6620 rhs.var = nonlocal_id;
6621 rhs.offset = 0;
6622 process_constraint (new_constraint (lhs, rhs));
6623
6624 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6625 global memory may point to global memory and escaped memory. */
6626 lhs.type = SCALAR;
6627 lhs.var = nonlocal_id;
6628 lhs.offset = 0;
6629 rhs.type = ADDRESSOF;
6630 rhs.var = nonlocal_id;
6631 rhs.offset = 0;
6632 process_constraint (new_constraint (lhs, rhs));
6633 rhs.type = ADDRESSOF;
6634 rhs.var = escaped_id;
6635 rhs.offset = 0;
6636 process_constraint (new_constraint (lhs, rhs));
6637
6638 /* Create the STOREDANYTHING variable, used to represent the set of
6639 variables stored to *ANYTHING. */
6640 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6641 gcc_assert (var_storedanything->id == storedanything_id);
6642 var_storedanything->is_artificial_var = 1;
6643 var_storedanything->offset = 0;
6644 var_storedanything->size = ~0;
6645 var_storedanything->fullsize = ~0;
6646 var_storedanything->is_special_var = 0;
6647
6648 /* Create the INTEGER variable, used to represent that a variable points
6649 to what an INTEGER "points to". */
6650 var_integer = new_var_info (NULL_TREE, "INTEGER");
6651 gcc_assert (var_integer->id == integer_id);
6652 var_integer->is_artificial_var = 1;
6653 var_integer->size = ~0;
6654 var_integer->fullsize = ~0;
6655 var_integer->offset = 0;
6656 var_integer->is_special_var = 1;
6657
6658 /* INTEGER = &ANYTHING, because we don't know where a dereference of
6659    a random integer will point to. */
6660 lhs.type = SCALAR;
6661 lhs.var = integer_id;
6662 lhs.offset = 0;
6663 rhs.type = ADDRESSOF;
6664 rhs.var = anything_id;
6665 rhs.offset = 0;
6666 process_constraint (new_constraint (lhs, rhs));
6667 }
6668
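/* In the notation used by dump_constraints the base constraints set
   up above read approximately:

     ANYTHING = &ANYTHING
     ESCAPED = *ESCAPED
     ESCAPED = ESCAPED + UNKNOWN
     *ESCAPED = NONLOCAL
     NONLOCAL = &NONLOCAL
     NONLOCAL = &ESCAPED
     INTEGER = &ANYTHING  */
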
6669 /* Initialize things necessary to perform PTA. */
6670
6671 static void
6672 init_alias_vars (void)
6673 {
6674 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6675
6676 bitmap_obstack_initialize (&pta_obstack);
6677 bitmap_obstack_initialize (&oldpta_obstack);
6678 bitmap_obstack_initialize (&predbitmap_obstack);
6679
6680 constraint_pool = create_alloc_pool ("Constraint pool",
6681 sizeof (struct constraint), 30);
6682 variable_info_pool = create_alloc_pool ("Variable info pool",
6683 sizeof (struct variable_info), 30);
6684 constraints.create (8);
6685 varmap.create (8);
6686 vi_for_tree = new hash_map<tree, varinfo_t>;
6687 call_stmt_vars = new hash_map<gimple, varinfo_t>;
6688
6689 memset (&stats, 0, sizeof (stats));
6690 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6691 init_base_vars ();
6692
6693 gcc_obstack_init (&fake_var_decl_obstack);
6694
6695 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6696 gcc_obstack_init (&final_solutions_obstack);
6697 }
6698
6699 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6700 predecessor edges. */
6701
6702 static void
6703 remove_preds_and_fake_succs (constraint_graph_t graph)
6704 {
6705 unsigned int i;
6706
6707 /* Clear the implicit ref and address nodes from the successor
6708 lists. */
6709 for (i = 1; i < FIRST_REF_NODE; i++)
6710 {
6711 if (graph->succs[i])
6712 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6713 FIRST_REF_NODE * 2);
6714 }
6715
6716 /* Free the successor list for the non-ref nodes. */
6717 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6718 {
6719 if (graph->succs[i])
6720 BITMAP_FREE (graph->succs[i]);
6721 }
6722
6723 /* Now resize the successor list to the remaining number of
6724    variables and blow away the predecessor bitmaps. */
6725 graph->size = varmap.length ();
6726 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6727
6728 free (graph->implicit_preds);
6729 graph->implicit_preds = NULL;
6730 free (graph->preds);
6731 graph->preds = NULL;
6732 bitmap_obstack_release (&predbitmap_obstack);
6733 }
6734
6735 /* Solve the constraint set. */
6736
6737 static void
6738 solve_constraints (void)
6739 {
6740 struct scc_info *si;
6741
6742 if (dump_file)
6743 fprintf (dump_file,
6744 "\nCollapsing static cycles and doing variable "
6745 "substitution\n");
6746
6747 init_graph (varmap.length () * 2);
6748
6749 if (dump_file)
6750 fprintf (dump_file, "Building predecessor graph\n");
6751 build_pred_graph ();
6752
6753 if (dump_file)
6754 fprintf (dump_file, "Detecting pointer and location "
6755 "equivalences\n");
6756 si = perform_var_substitution (graph);
6757
6758 if (dump_file)
6759 fprintf (dump_file, "Rewriting constraints and unifying "
6760 "variables\n");
6761 rewrite_constraints (graph, si);
6762
6763 build_succ_graph ();
6764
6765 free_var_substitution_info (si);
6766
6767 /* Attach complex constraints to graph nodes. */
6768 move_complex_constraints (graph);
6769
6770 if (dump_file)
6771 fprintf (dump_file, "Uniting pointer but not location equivalent "
6772 "variables\n");
6773 unite_pointer_equivalences (graph);
6774
6775 if (dump_file)
6776 fprintf (dump_file, "Finding indirect cycles\n");
6777 find_indirect_cycles (graph);
6778
6779 /* Implicit nodes and predecessors are no longer necessary at this
6780 point. */
6781 remove_preds_and_fake_succs (graph);
6782
6783 if (dump_file && (dump_flags & TDF_GRAPH))
6784 {
6785 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6786 "in dot format:\n");
6787 dump_constraint_graph (dump_file);
6788 fprintf (dump_file, "\n\n");
6789 }
6790
6791 if (dump_file)
6792 fprintf (dump_file, "Solving graph\n");
6793
6794 solve_graph (graph);
6795
6796 if (dump_file && (dump_flags & TDF_GRAPH))
6797 {
6798 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6799 "in dot format:\n");
6800 dump_constraint_graph (dump_file);
6801 fprintf (dump_file, "\n\n");
6802 }
6803
6804 if (dump_file)
6805 dump_sa_points_to_info (dump_file);
6806 }
6807
6808 /* Create points-to sets for the current function. See the comments
6809 at the start of the file for an algorithmic overview. */
6810
6811 static void
6812 compute_points_to_sets (void)
6813 {
6814 basic_block bb;
6815 unsigned i;
6816 varinfo_t vi;
6817
6818 timevar_push (TV_TREE_PTA);
6819
6820 init_alias_vars ();
6821
6822 intra_create_variable_infos (cfun);
6823
6824 /* Now walk all statements and build the constraint set. */
6825 FOR_EACH_BB_FN (bb, cfun)
6826 {
6827 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6828 gsi_next (&gsi))
6829 {
6830 gphi *phi = gsi.phi ();
6831
6832 if (! virtual_operand_p (gimple_phi_result (phi)))
6833 find_func_aliases (cfun, phi);
6834 }
6835
6836 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
6837 gsi_next (&gsi))
6838 {
6839 gimple stmt = gsi_stmt (gsi);
6840
6841 find_func_aliases (cfun, stmt);
6842 }
6843 }
6844
6845 if (dump_file)
6846 {
6847 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6848 dump_constraints (dump_file, 0);
6849 }
6850
6851 /* From the constraints compute the points-to sets. */
6852 solve_constraints ();
6853
6854 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6855 cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
6856
6857 /* Make sure the ESCAPED solution (which is used as placeholder in
6858 other solutions) does not reference itself. This simplifies
6859 points-to solution queries. */
6860 cfun->gimple_df->escaped.escaped = 0;
6861
6862 /* Compute the points-to sets for pointer SSA_NAMEs. */
6863 for (i = 0; i < num_ssa_names; ++i)
6864 {
6865 tree ptr = ssa_name (i);
6866 if (ptr
6867 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6868 find_what_p_points_to (ptr);
6869 }
6870
6871 /* Compute the call-used/clobbered sets. */
6872 FOR_EACH_BB_FN (bb, cfun)
6873 {
6874 gimple_stmt_iterator gsi;
6875
6876 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6877 {
6878 gcall *stmt;
6879 struct pt_solution *pt;
6880
6881 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
6882 if (!stmt)
6883 continue;
6884
6885 pt = gimple_call_use_set (stmt);
6886 if (gimple_call_flags (stmt) & ECF_CONST)
6887 memset (pt, 0, sizeof (struct pt_solution));
6888 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6889 {
6890 *pt = find_what_var_points_to (vi);
6891 /* Escaped (and thus nonlocal) variables are always
6892 implicitly used by calls. */
6893 /* ??? ESCAPED can be empty even though NONLOCAL
6894 always escapes. */
6895 pt->nonlocal = 1;
6896 pt->escaped = 1;
6897 }
6898 else
6899 {
6900 /* If there is nothing special about this call then
6901 we have made everything that is used also escape. */
6902 *pt = cfun->gimple_df->escaped;
6903 pt->nonlocal = 1;
6904 }
6905
6906 pt = gimple_call_clobber_set (stmt);
6907 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6908 memset (pt, 0, sizeof (struct pt_solution));
6909 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6910 {
6911 *pt = find_what_var_points_to (vi);
6912 /* Escaped (and thus nonlocal) variables are always
6913 implicitly clobbered by calls. */
6914 /* ??? ESCAPED can be empty even though NONLOCAL
6915 always escapes. */
6916 pt->nonlocal = 1;
6917 pt->escaped = 1;
6918 }
6919 else
6920 {
6921 /* If there is nothing special about this call then
6922 we have made everything that is used also escape. */
6923 *pt = cfun->gimple_df->escaped;
6924 pt->nonlocal = 1;
6925 }
6926 }
6927 }
6928
6929 timevar_pop (TV_TREE_PTA);
6930 }
6931
6932
6933 /* Delete created points-to sets. */
6934
6935 static void
6936 delete_points_to_sets (void)
6937 {
6938 unsigned int i;
6939
6940 delete shared_bitmap_table;
6941 shared_bitmap_table = NULL;
6942 if (dump_file && (dump_flags & TDF_STATS))
6943 fprintf (dump_file, "Points to sets created:%d\n",
6944 stats.points_to_sets_created);
6945
6946 delete vi_for_tree;
6947 delete call_stmt_vars;
6948 bitmap_obstack_release (&pta_obstack);
6949 constraints.release ();
6950
6951 for (i = 0; i < graph->size; i++)
6952 graph->complex[i].release ();
6953 free (graph->complex);
6954
6955 free (graph->rep);
6956 free (graph->succs);
6957 free (graph->pe);
6958 free (graph->pe_rep);
6959 free (graph->indirect_cycles);
6960 free (graph);
6961
6962 varmap.release ();
6963 free_alloc_pool (variable_info_pool);
6964 free_alloc_pool (constraint_pool);
6965
6966 obstack_free (&fake_var_decl_obstack, NULL);
6967
6968 delete final_solutions;
6969 obstack_free (&final_solutions_obstack, NULL);
6970 }
6971
6972 /* Mark "other" loads and stores as belonging to CLIQUE and with
6973 base zero. */
6974
6975 static bool
6976 visit_loadstore (gimple, tree base, tree ref, void *clique_)
6977 {
6978 unsigned short clique = (uintptr_t)clique_;
6979 if (TREE_CODE (base) == MEM_REF
6980 || TREE_CODE (base) == TARGET_MEM_REF)
6981 {
6982 tree ptr = TREE_OPERAND (base, 0);
6983 if (TREE_CODE (ptr) == SSA_NAME)
6984 {
6985 /* ??? We need to make sure 'ptr' doesn't include any of
6986 the restrict tags in its points-to set. */
6987 return false;
6988 }
6989
6990 /* For now let decls through. */
6991
6992 /* Do not overwrite existing cliques (that includes clique, base
6993 pairs we just set). */
6994 if (MR_DEPENDENCE_CLIQUE (base) == 0)
6995 {
6996 MR_DEPENDENCE_CLIQUE (base) = clique;
6997 MR_DEPENDENCE_BASE (base) = 0;
6998 }
6999 }
7000
7001 /* For plain decl accesses see whether they are accesses to globals
7002 and rewrite them to MEM_REFs with { clique, 0 }. */
7003 if (TREE_CODE (base) == VAR_DECL
7004 && is_global_var (base)
7005 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7006 ops callback. */
7007 && base != ref)
7008 {
7009 tree *basep = &ref;
7010 while (handled_component_p (*basep))
7011 basep = &TREE_OPERAND (*basep, 0);
7012 gcc_assert (TREE_CODE (*basep) == VAR_DECL);
7013 tree ptr = build_fold_addr_expr (*basep);
7014 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7015 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7016 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7017 MR_DEPENDENCE_BASE (*basep) = 0;
7018 }
7019
7020 return false;
7021 }
7022
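/* For instance, a store to a hypothetical global "int g;"

     g = 1;

   is rewritten by the walk above to the equivalent of

     MEM[(int *)&g] = 1;

   with dependence info { clique, 0 }.  Base zero marks the access as
   not restrict based, so it disambiguates against restrict accesses
   (which use a non-zero base) but not against other such accesses.  */
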
7023 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7024    CLIQUE, RESTRICT_VAR's ruid and LAST_RUID.  Return whether dependence
7025    info was assigned to REF. */
7026
7027 static bool
7028 maybe_set_dependence_info (tree ref, tree ptr,
7029 unsigned short &clique, varinfo_t restrict_var,
7030 unsigned short &last_ruid)
7031 {
7032 while (handled_component_p (ref))
7033 ref = TREE_OPERAND (ref, 0);
7034 if ((TREE_CODE (ref) == MEM_REF
7035 || TREE_CODE (ref) == TARGET_MEM_REF)
7036 && TREE_OPERAND (ref, 0) == ptr)
7037 {
7038 /* Do not overwrite existing cliques. This avoids clobbering dependence
7039    info from a function with restrict parameters that was inlined
7040    into another function with restrict parameters. This usually means
7041    we prefer to be precise in innermost loops. */
7042 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
7043 {
7044 if (clique == 0)
7045 clique = ++cfun->last_clique;
7046 if (restrict_var->ruid == 0)
7047 restrict_var->ruid = ++last_ruid;
7048 MR_DEPENDENCE_CLIQUE (ref) = clique;
7049 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7050 return true;
7051 }
7052 }
7053 return false;
7054 }
7055
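/* To make the following concrete, for a hypothetical function

     void f (int * restrict p, int * restrict q) { *p = *q; }

   compute_dependence_clique below finds that p and q each point to
   exactly one restrict variable, allocates a single clique for f and
   distinct ruids for the two restrict tags, and maybe_set_dependence_info
   marks *p and *q with { clique, ruid-of-p } and { clique, ruid-of-q },
   which allows the two accesses to be disambiguated.  */
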
7056 /* Compute the set of independent memory references based on restrict
7057    tags and their conservative propagation to the points-to sets. */
7058
7059 static void
7060 compute_dependence_clique (void)
7061 {
7062 unsigned short clique = 0;
7063 unsigned short last_ruid = 0;
7064 for (unsigned i = 0; i < num_ssa_names; ++i)
7065 {
7066 tree ptr = ssa_name (i);
7067 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7068 continue;
7069
7070 /* Avoid all this when ptr is not dereferenced? */
7071 tree p = ptr;
7072 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7073 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7074 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7075 p = SSA_NAME_VAR (ptr);
7076 varinfo_t vi = lookup_vi_for_tree (p);
7077 if (!vi)
7078 continue;
7079 vi = get_varinfo (find (vi->id));
7080 bitmap_iterator bi;
7081 unsigned j;
7082 varinfo_t restrict_var = NULL;
7083 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7084 {
7085 varinfo_t oi = get_varinfo (j);
7086 if (oi->is_restrict_var)
7087 {
7088 if (restrict_var)
7089 {
7090 if (dump_file && (dump_flags & TDF_DETAILS))
7091 {
7092 fprintf (dump_file, "found restrict pointed-to "
7093 "for ");
7094 print_generic_expr (dump_file, ptr, 0);
7095 fprintf (dump_file, " but not exclusively\n");
7096 }
7097 restrict_var = NULL;
7098 break;
7099 }
7100 restrict_var = oi;
7101 }
7102 /* NULL is the only other valid points-to entry. */
7103 else if (oi->id != nothing_id)
7104 {
7105 restrict_var = NULL;
7106 break;
7107 }
7108 }
7109 /* Ok, found that ptr must(!) point to a single(!) restrict
7110 variable. */
7111 /* ??? PTA isn't really a proper propagation engine to compute
7112 this property.
7113 ??? We could handle merging of two restricts by unifying them. */
7114 if (restrict_var)
7115 {
7116 /* Now look at possible dereferences of ptr. */
7117 imm_use_iterator ui;
7118 gimple use_stmt;
7119 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7120 {
7121 /* ??? Calls and asms. */
7122 if (!gimple_assign_single_p (use_stmt))
7123 continue;
7124 maybe_set_dependence_info (gimple_assign_lhs (use_stmt), ptr,
7125 clique, restrict_var, last_ruid);
7126 maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt), ptr,
7127 clique, restrict_var, last_ruid);
7128 }
7129 }
7130 }
7131
7132 if (clique == 0)
7133 return;
7134
7135 /* Assign the BASE id zero to all accesses not based on a restrict
7136    pointer. That way they get disambiguated against restrict
7137    accesses but not against each other. */
7138 /* ??? For restricts derived from globals (thus not incoming
7139    parameters) we can't restrict scoping properly, thus the following
7140    is too aggressive there. For now we have excluded those globals from
7141    getting into the MR_DEPENDENCE machinery. */
7142 basic_block bb;
7143 FOR_EACH_BB_FN (bb, cfun)
7144 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7145 !gsi_end_p (gsi); gsi_next (&gsi))
7146 {
7147 gimple stmt = gsi_stmt (gsi);
7148 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t)clique,
7149 visit_loadstore, visit_loadstore);
7150 }
7151 }
7152
7153 /* Compute points-to information for every SSA_NAME pointer in the
7154 current function and compute the transitive closure of escaped
7155 variables to re-initialize the call-clobber states of local variables. */
7156
7157 unsigned int
7158 compute_may_aliases (void)
7159 {
7160 if (cfun->gimple_df->ipa_pta)
7161 {
7162 if (dump_file)
7163 {
7164 fprintf (dump_file, "\nNot re-computing points-to information "
7165 "because IPA points-to information is available.\n\n");
7166
7167 /* But still dump what points-to information we have remaining. */
7168 dump_alias_info (dump_file);
7169 }
7170
7171 return 0;
7172 }
7173
7174 /* For each pointer P_i, determine the sets of variables that P_i may
7175 point-to. Compute the reachability set of escaped and call-used
7176 variables. */
7177 compute_points_to_sets ();
7178
7179 /* Debugging dumps. */
7180 if (dump_file)
7181 dump_alias_info (dump_file);
7182
7183 /* Compute restrict-based memory disambiguations. */
7184 compute_dependence_clique ();
7185
7186 /* Deallocate memory used by aliasing data structures and the internal
7187 points-to solution. */
7188 delete_points_to_sets ();
7189
7190 gcc_assert (!need_ssa_update_p (cfun));
7191
7192 return 0;
7193 }
7194
7195 /* A dummy pass to cause points-to information to be computed via
7196 TODO_rebuild_alias. */
7197
7198 namespace {
7199
7200 const pass_data pass_data_build_alias =
7201 {
7202 GIMPLE_PASS, /* type */
7203 "alias", /* name */
7204 OPTGROUP_NONE, /* optinfo_flags */
7205 TV_NONE, /* tv_id */
7206 ( PROP_cfg | PROP_ssa ), /* properties_required */
7207 0, /* properties_provided */
7208 0, /* properties_destroyed */
7209 0, /* todo_flags_start */
7210 TODO_rebuild_alias, /* todo_flags_finish */
7211 };
7212
7213 class pass_build_alias : public gimple_opt_pass
7214 {
7215 public:
7216 pass_build_alias (gcc::context *ctxt)
7217 : gimple_opt_pass (pass_data_build_alias, ctxt)
7218 {}
7219
7220 /* opt_pass methods: */
7221 virtual bool gate (function *) { return flag_tree_pta; }
7222
7223 }; // class pass_build_alias
7224
7225 } // anon namespace
7226
7227 gimple_opt_pass *
7228 make_pass_build_alias (gcc::context *ctxt)
7229 {
7230 return new pass_build_alias (ctxt);
7231 }
7232
7233 /* A dummy pass to cause points-to information to be computed via
7234 TODO_rebuild_alias. */
7235
7236 namespace {
7237
7238 const pass_data pass_data_build_ealias =
7239 {
7240 GIMPLE_PASS, /* type */
7241 "ealias", /* name */
7242 OPTGROUP_NONE, /* optinfo_flags */
7243 TV_NONE, /* tv_id */
7244 ( PROP_cfg | PROP_ssa ), /* properties_required */
7245 0, /* properties_provided */
7246 0, /* properties_destroyed */
7247 0, /* todo_flags_start */
7248 TODO_rebuild_alias, /* todo_flags_finish */
7249 };
7250
7251 class pass_build_ealias : public gimple_opt_pass
7252 {
7253 public:
7254 pass_build_ealias (gcc::context *ctxt)
7255 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7256 {}
7257
7258 /* opt_pass methods: */
7259 virtual bool gate (function *) { return flag_tree_pta; }
7260
7261 }; // class pass_build_ealias
7262
7263 } // anon namespace
7264
7265 gimple_opt_pass *
7266 make_pass_build_ealias (gcc::context *ctxt)
7267 {
7268 return new pass_build_ealias (ctxt);
7269 }
7270
7271
7272 /* The IPA PTA solution for ESCAPED. */
7273 struct pt_solution ipa_escaped_pt
7274 = { true, false, false, false, false, false, false, false, NULL };
7275
7276 /* Associate node with varinfo DATA. Worker for
7277 cgraph_for_node_and_aliases. */
7278 static bool
7279 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7280 {
7281 if ((node->alias || node->thunk.thunk_p)
7282 && node->analyzed)
7283 insert_vi_for_tree (node->decl, (varinfo_t)data);
7284 return false;
7285 }
7286
7287 /* Execute the driver for IPA PTA. */
7288 static unsigned int
7289 ipa_pta_execute (void)
7290 {
7291 struct cgraph_node *node;
7292 varpool_node *var;
7293 int from;
7294
7295 in_ipa_mode = 1;
7296
7297 init_alias_vars ();
7298
7299 if (dump_file && (dump_flags & TDF_DETAILS))
7300 {
7301 symtab_node::dump_table (dump_file);
7302 fprintf (dump_file, "\n");
7303 }
7304
7305 /* Build the constraints. */
7306 FOR_EACH_DEFINED_FUNCTION (node)
7307 {
7308 varinfo_t vi;
7309 /* Nodes without a body are not interesting. Especially do not
7310 visit clones at this point for now - we get duplicate decls
7311 there for inline clones at least. */
7312 if (!node->has_gimple_body_p () || node->global.inlined_to)
7313 continue;
7314 node->get_body ();
7315
7316 gcc_assert (!node->clone_of);
7317
7318 vi = create_function_info_for (node->decl,
7319 alias_get_name (node->decl));
7320 node->call_for_symbol_thunks_and_aliases
7321 (associate_varinfo_to_alias, vi, true);
7322 }
7323
7324 /* Create constraints for global variables and their initializers. */
7325 FOR_EACH_VARIABLE (var)
7326 {
7327 if (var->alias && var->analyzed)
7328 continue;
7329
7330 get_vi_for_tree (var->decl);
7331 }
7332
7333 if (dump_file)
7334 {
7335 fprintf (dump_file,
7336 "Generating constraints for global initializers\n\n");
7337 dump_constraints (dump_file, 0);
7338 fprintf (dump_file, "\n");
7339 }
7340 from = constraints.length ();
7341
7342 FOR_EACH_DEFINED_FUNCTION (node)
7343 {
7344 struct function *func;
7345 basic_block bb;
7346
7347 /* Nodes without a body are not interesting. */
7348 if (!node->has_gimple_body_p () || node->clone_of)
7349 continue;
7350
7351 if (dump_file)
7352 {
7353 fprintf (dump_file,
7354 "Generating constraints for %s", node->name ());
7355 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7356 fprintf (dump_file, " (%s)",
7357 IDENTIFIER_POINTER
7358 (DECL_ASSEMBLER_NAME (node->decl)));
7359 fprintf (dump_file, "\n");
7360 }
7361
7362 func = DECL_STRUCT_FUNCTION (node->decl);
7363 gcc_assert (cfun == NULL);
7364
7365 /* For externally visible functions, or functions annotated with
7366    attribute ((used)), use local constraints for their arguments.
7367    For local functions we see all callers and thus do not need initial
7368    constraints for parameters. */
7369 if (node->used_from_other_partition
7370 || node->externally_visible
7371 || node->force_output)
7372 {
7373 intra_create_variable_infos (func);
7374
7375 /* We also need to make function return values escape. Nothing
7376 escapes by returning from main though. */
7377 if (!MAIN_NAME_P (DECL_NAME (node->decl)))
7378 {
7379 varinfo_t fi, rvi;
7380 fi = lookup_vi_for_tree (node->decl);
7381 rvi = first_vi_for_offset (fi, fi_result);
7382 if (rvi && rvi->offset == fi_result)
7383 {
7384 struct constraint_expr includes;
7385 struct constraint_expr var;
7386 includes.var = escaped_id;
7387 includes.offset = 0;
7388 includes.type = SCALAR;
7389 var.var = rvi->id;
7390 var.offset = 0;
7391 var.type = SCALAR;
7392 process_constraint (new_constraint (includes, var));
7393 }
7394 }
7395 }
7396
7397 /* Build constraints for the function body. */
7398 FOR_EACH_BB_FN (bb, func)
7399 {
7400 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7401 gsi_next (&gsi))
7402 {
7403 gphi *phi = gsi.phi ();
7404
7405 if (! virtual_operand_p (gimple_phi_result (phi)))
7406 find_func_aliases (func, phi);
7407 }
7408
7409 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7410 gsi_next (&gsi))
7411 {
7412 gimple stmt = gsi_stmt (gsi);
7413
7414 find_func_aliases (func, stmt);
7415 find_func_clobbers (func, stmt);
7416 }
7417 }
7418
7419 if (dump_file)
7420 {
7421 fprintf (dump_file, "\n");
7422 dump_constraints (dump_file, from);
7423 fprintf (dump_file, "\n");
7424 }
7425 from = constraints.length ();
7426 }
7427
7428 /* From the constraints compute the points-to sets. */
7429 solve_constraints ();
7430
7431 /* Compute the global points-to sets for ESCAPED.
7432 ??? Note that the computed escape set is not correct
7433 for the whole unit as we fail to consider graph edges to
7434 externally visible functions. */
7435 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7436
7437 /* Make sure the ESCAPED solution (which is used as placeholder in
7438 other solutions) does not reference itself. This simplifies
7439 points-to solution queries. */
7440 ipa_escaped_pt.ipa_escaped = 0;
7441
7442 /* Assign the points-to sets to the SSA names in the unit. */
7443 FOR_EACH_DEFINED_FUNCTION (node)
7444 {
7445 tree ptr;
7446 struct function *fn;
7447 unsigned i;
7448 basic_block bb;
7449
7450 /* Nodes without a body are not interesting. */
7451 if (!node->has_gimple_body_p () || node->clone_of)
7452 continue;
7453
7454 fn = DECL_STRUCT_FUNCTION (node->decl);
7455
7456 /* Compute the points-to sets for pointer SSA_NAMEs. */
7457 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7458 {
7459 if (ptr
7460 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7461 find_what_p_points_to (ptr);
7462 }
7463
7464 /* Compute the call-use and call-clobber sets for indirect calls
7465 and calls to external functions. */
7466 FOR_EACH_BB_FN (bb, fn)
7467 {
7468 gimple_stmt_iterator gsi;
7469
7470 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7471 {
7472 gcall *stmt;
7473 struct pt_solution *pt;
7474 varinfo_t vi, fi;
7475 tree decl;
7476
7477 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7478 if (!stmt)
7479 continue;
7480
7481 /* Handle direct calls to functions with body. */
7482 decl = gimple_call_fndecl (stmt);
7483 if (decl
7484 && (fi = lookup_vi_for_tree (decl))
7485 && fi->is_fn_info)
7486 {
7487 *gimple_call_clobber_set (stmt)
7488 = find_what_var_points_to
7489 (first_vi_for_offset (fi, fi_clobbers));
7490 *gimple_call_use_set (stmt)
7491 = find_what_var_points_to
7492 (first_vi_for_offset (fi, fi_uses));
7493 }
7494 /* Handle direct calls to external functions. */
7495 else if (decl)
7496 {
7497 pt = gimple_call_use_set (stmt);
7498 if (gimple_call_flags (stmt) & ECF_CONST)
7499 memset (pt, 0, sizeof (struct pt_solution));
7500 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7501 {
7502 *pt = find_what_var_points_to (vi);
7503 /* Escaped (and thus nonlocal) variables are always
7504 implicitly used by calls. */
7505 /* ??? ESCAPED can be empty even though NONLOCAL
7506 always escapes. */
7507 pt->nonlocal = 1;
7508 pt->ipa_escaped = 1;
7509 }
7510 else
7511 {
7512 /* If there is nothing special about this call then
7513 we have made everything that is used also escape. */
7514 *pt = ipa_escaped_pt;
7515 pt->nonlocal = 1;
7516 }
7517
7518 pt = gimple_call_clobber_set (stmt);
7519 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7520 memset (pt, 0, sizeof (struct pt_solution));
7521 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7522 {
7523 *pt = find_what_var_points_to (vi);
7524 /* Escaped (and thus nonlocal) variables are always
7525 implicitly clobbered by calls. */
7526 /* ??? ESCAPED can be empty even though NONLOCAL
7527 always escapes. */
7528 pt->nonlocal = 1;
7529 pt->ipa_escaped = 1;
7530 }
7531 else
7532 {
7533 /* If there is nothing special about this call then
7534 we have made everything that is used also escape. */
7535 *pt = ipa_escaped_pt;
7536 pt->nonlocal = 1;
7537 }
7538 }
7539 /* Handle indirect calls. */
7540 else if (!decl
7541 && (fi = get_fi_for_callee (stmt)))
7542 {
7543 /* We need to accumulate all clobbers/uses of all possible
7544 callees. */
7545 fi = get_varinfo (find (fi->id));
7546 /* If we cannot constrain the set of functions we may end up
7547    calling, we end up using/clobbering everything. */
7548 if (bitmap_bit_p (fi->solution, anything_id)
7549 || bitmap_bit_p (fi->solution, nonlocal_id)
7550 || bitmap_bit_p (fi->solution, escaped_id))
7551 {
7552 pt_solution_reset (gimple_call_clobber_set (stmt));
7553 pt_solution_reset (gimple_call_use_set (stmt));
7554 }
7555 else
7556 {
7557 bitmap_iterator bi;
7558 unsigned i;
7559 struct pt_solution *uses, *clobbers;
7560
7561 uses = gimple_call_use_set (stmt);
7562 clobbers = gimple_call_clobber_set (stmt);
7563 memset (uses, 0, sizeof (struct pt_solution));
7564 memset (clobbers, 0, sizeof (struct pt_solution));
7565 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7566 {
7567 struct pt_solution sol;
7568
7569 vi = get_varinfo (i);
7570 if (!vi->is_fn_info)
7571 {
7572 /* ??? We could be more precise here? */
7573 uses->nonlocal = 1;
7574 uses->ipa_escaped = 1;
7575 clobbers->nonlocal = 1;
7576 clobbers->ipa_escaped = 1;
7577 continue;
7578 }
7579
7580 if (!uses->anything)
7581 {
7582 sol = find_what_var_points_to
7583 (first_vi_for_offset (vi, fi_uses));
7584 pt_solution_ior_into (uses, &sol);
7585 }
7586 if (!clobbers->anything)
7587 {
7588 sol = find_what_var_points_to
7589 (first_vi_for_offset (vi, fi_clobbers));
7590 pt_solution_ior_into (clobbers, &sol);
7591 }
7592 }
7593 }
7594 }
7595 }
7596 }
7597
7598 fn->gimple_df->ipa_pta = true;
7599 }
7600
7601 delete_points_to_sets ();
7602
7603 in_ipa_mode = 0;
7604
7605 return 0;
7606 }
7607
7608 namespace {
7609
7610 const pass_data pass_data_ipa_pta =
7611 {
7612 SIMPLE_IPA_PASS, /* type */
7613 "pta", /* name */
7614 OPTGROUP_NONE, /* optinfo_flags */
7615 TV_IPA_PTA, /* tv_id */
7616 0, /* properties_required */
7617 0, /* properties_provided */
7618 0, /* properties_destroyed */
7619 0, /* todo_flags_start */
7620 0, /* todo_flags_finish */
7621 };
7622
7623 class pass_ipa_pta : public simple_ipa_opt_pass
7624 {
7625 public:
7626 pass_ipa_pta (gcc::context *ctxt)
7627 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
7628 {}
7629
7630 /* opt_pass methods: */
7631 virtual bool gate (function *)
7632 {
7633 return (optimize
7634 && flag_ipa_pta
7635 /* Don't bother doing anything if the program has errors. */
7636 && !seen_error ());
7637 }
7638
7639 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
7640
7641 }; // class pass_ipa_pta
7642
7643 } // anon namespace
7644
7645 simple_ipa_opt_pass *
7646 make_pass_ipa_pta (gcc::context *ctxt)
7647 {
7648 return new pass_ipa_pta (ctxt);
7649 }