]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-structalias.c
Update copyright years.
[thirdparty/gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2020 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "gimple-walk.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "tree-ssa.h"
45 #include "tree-cfg.h"
46
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
49 points-to sets.
50
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
57 as a consequence.
58
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
62
   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html
66
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
70
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
77
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
80
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
83 order.
84 Each variable for a structure field has
85
86 1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
90
91 Thus,
92 struct f
93 {
94 int a;
95 int b;
96 } foo;
97 int *bar;
98
99 looks like
100
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
104
105
106 In order to solve the system of set constraints, the following is
107 done:
108
109 1. Each constraint variable x has a solution set associated with it,
110 Sol(x).
111
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
118
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
121
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
124
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
128
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
132
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
136
137 8. The process of walking the graph is iterated until no solution
138 sets change.
139
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
143
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
147
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
150
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
153
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
156
157 /* IPA-PTA optimizations possible.
158
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
163
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode.
167 There is now is_ipa_escape_point but this is only used in a few
168 selected places.
169
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
177
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
188
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
   solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
193 This is also required to make the escaped-HEAP trick work in IPA mode.
194
195 We never put function decls in points-to sets so we do not
196 keep the set of called functions for indirect calls.
197
198 And probably more. */
199
/* If true, structure fields are given separate constraint variables
   (field-sensitive analysis, see the file comment above).  */
static bool use_field_sensitive = true;
/* Nonzero when computing points-to in whole-program (IPA) mode.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

/* Forward declarations.  */
static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Iterate over the set bits of bitmap A like EXECUTE_IF_SET_IN_BITMAP,
   but tolerate a NULL bitmap A (iterating zero times).  Beware that the
   argument A is evaluated twice; pass only side-effect-free expressions.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
226
/* Statistics collected while building and solving the constraint
   system; useful when dumping the analysis.  */
static struct constraint_stats
{
  /* Number of constraint variables created.  */
  unsigned int total_vars;
  /* Number of variables found not to hold pointers.  */
  unsigned int nonpointer_vars;
  /* Number of variables unified before solving.  */
  unsigned int unified_vars_static;
  /* Number of variables unified during solving.  */
  unsigned int unified_vars_dynamic;
  /* Number of solver iterations performed.  */
  unsigned int iterations;
  /* Number of explicit edges in the constraint graph.  */
  unsigned int num_edges;
  /* Number of implicit edges in the constraint graph.  */
  unsigned int num_implicit_edges;
  /* Number of final points-to sets created.  */
  unsigned int points_to_sets_created;
} stats;
238
/* Per-constraint-variable information.  There is one of these for each
   constraint variable; structure fields get their own entries chained
   through NEXT/HEAD (see the file comment above).  */
struct variable_info
{
  /* ID of this variable */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up. */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed. */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable. */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable. */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable. */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable. */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers. */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers. */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer. */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable. */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis. */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents a IPA function info. */
  unsigned int is_fn_info : 1;

  /* ??? Store somewhere better. */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure. */
  unsigned next;

  /* The ID of the variable for the first field in this structure. */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits. */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits. */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed. */
  unsigned int shadow_var_uid;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with. */
  tree decl;

  /* Points-to set for this variable. */
  bitmap solution;

  /* Old points-to set for this variable. */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
320
321 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
322 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
323 unsigned HOST_WIDE_INT);
324 static varinfo_t lookup_vi_for_tree (tree);
325 static inline bool type_can_have_subvars (const_tree);
326 static void make_param_constraints (varinfo_t);
327
/* Pool of variable info structures. */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution. */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
/* Obstack presumably backing the final pt_solutions above — allocation
   site not visible in this chunk; confirm before relying on it.  */
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id. */
static vec<varinfo_t> varmap;
339
/* Return the varmap element N, i.e. the variable info structure for
   constraint variable id N.  N must be a valid id (no bounds check
   beyond the vec's own checking).  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
347
/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  Sub-variable chains are
   terminated with a next id of zero (see the special-variable enum
   below: id zero is unused and serves as the terminator).  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}
356
357 /* Static IDs for the special variables. Variable ID zero is unused
358 and used as terminator for the sub-variable chain. */
359 enum { nothing_id = 1, anything_id = 2, string_id = 3,
360 escaped_id = 4, nonlocal_id = 5,
361 storedanything_id = 6, integer_id = 7 };
362
363 /* Return a new variable info structure consisting for a variable
364 named NAME, and using constraint graph node NODE. Append it
365 to the vector of variable info structures. */
366
367 static varinfo_t
368 new_var_info (tree t, const char *name, bool add_id)
369 {
370 unsigned index = varmap.length ();
371 varinfo_t ret = variable_info_pool.allocate ();
372
373 if (dump_file && add_id)
374 {
375 char *tempname = xasprintf ("%s(%d)", name, index);
376 name = ggc_strdup (tempname);
377 free (tempname);
378 }
379
380 ret->id = index;
381 ret->name = name;
382 ret->decl = t;
383 /* Vars without decl are artificial and do not have sub-variables. */
384 ret->is_artificial_var = (t == NULL_TREE);
385 ret->is_special_var = false;
386 ret->is_unknown_size_var = false;
387 ret->is_full_var = (t == NULL_TREE);
388 ret->is_heap_var = false;
389 ret->may_have_pointers = true;
390 ret->only_restrict_pointers = false;
391 ret->is_restrict_var = false;
392 ret->ruid = 0;
393 ret->is_global_var = (t == NULL_TREE);
394 ret->is_ipa_escape_point = false;
395 ret->is_fn_info = false;
396 if (t && DECL_P (t))
397 ret->is_global_var = (is_global_var (t)
398 /* We have to treat even local register variables
399 as escape points. */
400 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
401 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
402 ret->solution = BITMAP_ALLOC (&pta_obstack);
403 ret->oldsolution = NULL;
404 ret->next = 0;
405 ret->shadow_var_uid = 0;
406 ret->head = ret->id;
407
408 stats.total_vars++;
409
410 varmap.safe_push (ret);
411
412 return ret;
413 }
414
415 /* A map mapping call statements to per-stmt variables for uses
416 and clobbers specific to the call. */
417 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
418
419 /* Lookup or create the variable for the call statement CALL. */
420
421 static varinfo_t
422 get_call_vi (gcall *call)
423 {
424 varinfo_t vi, vi2;
425
426 bool existed;
427 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
428 if (existed)
429 return *slot_p;
430
431 vi = new_var_info (NULL_TREE, "CALLUSED", true);
432 vi->offset = 0;
433 vi->size = 1;
434 vi->fullsize = 2;
435 vi->is_full_var = true;
436 vi->is_reg_var = true;
437
438 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
439 vi2->offset = 1;
440 vi2->size = 1;
441 vi2->fullsize = 2;
442 vi2->is_full_var = true;
443 vi2->is_reg_var = true;
444
445 vi->next = vi2->id;
446
447 *slot_p = vi;
448 return vi;
449 }
450
451 /* Lookup the variable for the call statement CALL representing
452 the uses. Returns NULL if there is nothing special about this call. */
453
454 static varinfo_t
455 lookup_call_use_vi (gcall *call)
456 {
457 varinfo_t *slot_p = call_stmt_vars->get (call);
458 if (slot_p)
459 return *slot_p;
460
461 return NULL;
462 }
463
464 /* Lookup the variable for the call statement CALL representing
465 the clobbers. Returns NULL if there is nothing special about this call. */
466
467 static varinfo_t
468 lookup_call_clobber_vi (gcall *call)
469 {
470 varinfo_t uses = lookup_call_use_vi (call);
471 if (!uses)
472 return NULL;
473
474 return vi_next (uses);
475 }
476
/* Lookup or create the variable for the call statement CALL representing
   the uses.  get_call_vi returns the uses variable of the pair, so this
   is a plain forwarding wrapper.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}
485
486 /* Lookup or create the variable for the call statement CALL representing
487 the clobbers. */
488
489 static varinfo_t ATTRIBUTE_UNUSED
490 get_call_clobber_vi (gcall *call)
491 {
492 return vi_next (get_call_vi (call));
493 }
494
495
/* The three kinds of constraint expression operand described in the
   file comment: plain variable (x), dereference (*x), address-of (&x).  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint. */

struct constraint_expr
{
  /* Constraint type. */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint. */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member. */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset. */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
520 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
521 static void get_constraint_for (tree, vec<ce_s> *);
522 static void get_constraint_for_rhs (tree, vec<ce_s> *);
523 static void do_deref (vec<ce_s> *);
524
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  /* The expression being assigned to.  */
  struct constraint_expr lhs;
  /* The expression whose solution flows into LHS.  */
  struct constraint_expr rhs;
};
536
537 /* List of constraints that we use to build the constraint graph from. */
538
539 static vec<constraint_t> constraints;
540 static object_allocator<constraint> constraint_pool ("Constraint pool");
541
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  All per-node arrays below are indexed
   by constraint variable id (plus REF nodes, see FIRST_REF_NODE).  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map. */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution). */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles. */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution. */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built). */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets. */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution. */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding. */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph. */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes. */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution. */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution. */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0. */
  vec<constraint_t> *complex;
};
615
616 static constraint_graph_t graph;
617
618 /* During variable substitution and the offline version of indirect
619 cycle finding, we create nodes to represent dereferences and
620 address taken constraints. These represent where these start and
621 end. */
622 #define FIRST_REF_NODE (varmap).length ()
623 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
624
625 /* Return the representative node for NODE, if NODE has been unioned
626 with another NODE.
627 This function performs path compression along the way to finding
628 the representative. */
629
630 static unsigned int
631 find (unsigned int node)
632 {
633 gcc_checking_assert (node < graph->size);
634 if (graph->rep[node] != node)
635 return graph->rep[node] = find (graph->rep[node]);
636 return node;
637 }
638
639 /* Union the TO and FROM nodes to the TO nodes.
640 Note that at some point in the future, we may want to do
641 union-by-rank, in which case we are going to have to return the
642 node we unified to. */
643
644 static bool
645 unite (unsigned int to, unsigned int from)
646 {
647 gcc_checking_assert (to < graph->size && from < graph->size);
648 if (to != from && graph->rep[from] != to)
649 {
650 graph->rep[from] = to;
651 return true;
652 }
653 return false;
654 }
655
656 /* Create a new constraint consisting of LHS and RHS expressions. */
657
658 static constraint_t
659 new_constraint (const struct constraint_expr lhs,
660 const struct constraint_expr rhs)
661 {
662 constraint_t ret = constraint_pool.allocate ();
663 ret->lhs = lhs;
664 ret->rhs = rhs;
665 return ret;
666 }
667
668 /* Print out constraint C to FILE. */
669
670 static void
671 dump_constraint (FILE *file, constraint_t c)
672 {
673 if (c->lhs.type == ADDRESSOF)
674 fprintf (file, "&");
675 else if (c->lhs.type == DEREF)
676 fprintf (file, "*");
677 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
678 if (c->lhs.offset == UNKNOWN_OFFSET)
679 fprintf (file, " + UNKNOWN");
680 else if (c->lhs.offset != 0)
681 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
682 fprintf (file, " = ");
683 if (c->rhs.type == ADDRESSOF)
684 fprintf (file, "&");
685 else if (c->rhs.type == DEREF)
686 fprintf (file, "*");
687 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
688 if (c->rhs.offset == UNKNOWN_OFFSET)
689 fprintf (file, " + UNKNOWN");
690 else if (c->rhs.offset != 0)
691 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
692 }
693
694
695 void debug_constraint (constraint_t);
696 void debug_constraints (void);
697 void debug_constraint_graph (void);
698 void debug_solution_for_var (unsigned int);
699 void debug_sa_points_to_info (void);
700 void debug_varinfo (varinfo_t);
701 void debug_varmap (void);
702
703 /* Print out constraint C to stderr. */
704
705 DEBUG_FUNCTION void
706 debug_constraint (constraint_t c)
707 {
708 dump_constraint (stderr, c);
709 fprintf (stderr, "\n");
710 }
711
712 /* Print out all constraints to FILE */
713
714 static void
715 dump_constraints (FILE *file, int from)
716 {
717 int i;
718 constraint_t c;
719 for (i = from; constraints.iterate (i, &c); i++)
720 if (c)
721 {
722 dump_constraint (file, c);
723 fprintf (file, "\n");
724 }
725 }
726
/* Print out all constraints to stderr.  Convenience wrapper around
   dump_constraints for use from the debugger.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
734
/* Print the constraint graph in dot format.  Only representative
   (non-unified) nodes are emitted; each node is labeled with its
   complex constraints and edges are taken from the successor
   bitmaps.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized: */
  if (!graph)
    return;

  /* Prints the header of the dot file: */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      /* Skip the REF node of variable id 0, which is unused (id zero
	 only serves as sub-variable chain terminator).  */
      if (i == FIRST_REF_NODE)
	continue;
      /* Only representatives are printed.  */
      if (find (i) != i)
	continue;
      /* Nodes >= FIRST_REF_NODE represent dereferences, printed "*name".  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges. */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after unification) are uninteresting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file. */
  fprintf (file, "}\n");
}
809
/* Print out the constraint graph to stderr.  Convenience wrapper
   around dump_constraint_graph for use from the debugger.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
817
818 /* SOLVER FUNCTIONS
819
820 The solver is a simple worklist solver, that works on the following
821 algorithm:
822
823 sbitmap changed_nodes = all zeroes;
824 changed_count = 0;
825 For each node that is not already collapsed:
826 changed_count++;
827 set bit in changed nodes
828
829 while (changed_count > 0)
830 {
831 compute topological ordering for constraint graph
832
833 find and collapse cycles in the constraint graph (updating
834 changed if necessary)
835
836 for each node (n) in the graph in topological order:
837 changed_count--;
838
839 Process each complex constraint associated with the node,
840 updating changed if necessary.
841
842 For each outgoing edge from n, propagate the solution from n to
843 the destination of the edge, updating changed as necessary.
844
845 } */
846
847 /* Return true if two constraint expressions A and B are equal. */
848
849 static bool
850 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
851 {
852 return a.type == b.type && a.var == b.var && a.offset == b.offset;
853 }
854
855 /* Return true if constraint expression A is less than constraint expression
856 B. This is just arbitrary, but consistent, in order to give them an
857 ordering. */
858
859 static bool
860 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
861 {
862 if (a.type == b.type)
863 {
864 if (a.var == b.var)
865 return a.offset < b.offset;
866 else
867 return a.var < b.var;
868 }
869 else
870 return a.type < b.type;
871 }
872
873 /* Return true if constraint A is less than constraint B. This is just
874 arbitrary, but consistent, in order to give them an ordering. */
875
876 static bool
877 constraint_less (const constraint_t &a, const constraint_t &b)
878 {
879 if (constraint_expr_less (a->lhs, b->lhs))
880 return true;
881 else if (constraint_expr_less (b->lhs, a->lhs))
882 return false;
883 else
884 return constraint_expr_less (a->rhs, b->rhs);
885 }
886
887 /* Return true if two constraints A and B are equal. */
888
889 static bool
890 constraint_equal (struct constraint a, struct constraint b)
891 {
892 return constraint_expr_equal (a.lhs, b.lhs)
893 && constraint_expr_equal (a.rhs, b.rhs);
894 }
895
896
897 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
898
899 static constraint_t
900 constraint_vec_find (vec<constraint_t> vec,
901 struct constraint lookfor)
902 {
903 unsigned int place;
904 constraint_t found;
905
906 if (!vec.exists ())
907 return NULL;
908
909 place = vec.lower_bound (&lookfor, constraint_less);
910 if (place >= vec.length ())
911 return NULL;
912 found = vec[place];
913 if (!constraint_equal (*found, lookfor))
914 return NULL;
915 return found;
916 }
917
918 /* Union two constraint vectors, TO and FROM. Put the result in TO.
919 Returns true of TO set is changed. */
920
921 static bool
922 constraint_set_union (vec<constraint_t> *to,
923 vec<constraint_t> *from)
924 {
925 int i;
926 constraint_t c;
927 bool any_change = false;
928
929 FOR_EACH_VEC_ELT (*from, i, c)
930 {
931 if (constraint_vec_find (*to, *c) == NULL)
932 {
933 unsigned int place = to->lower_bound (c, constraint_less);
934 to->safe_insert (place, c);
935 any_change = true;
936 }
937 }
938 return any_change;
939 }
940
/* Expands the solution in SET to all sub-fields of variables included.
   *EXPANDED caches the result: if non-NULL it is assumed to hold a
   previous expansion of SET and is returned as-is; otherwise the
   expansion is computed, stored in *EXPANDED (allocated on the
   per-iteration obstack) and returned.  SET itself is not modified.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  /* Reuse a previously computed expansion if we have one.  */
  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Only process chain heads; other bits were not set above.  */
      if (v->head != j)
	continue;
      /* Walk the sub-variable chain, adding every field.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
980
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  EXPANDED_DELTA caches the sub-field expansion of DELTA
   for the INC == UNKNOWN_OFFSET case (see solution_set_expand).
   Returns true if TO changed.  */

static bool
set_union_with_increment  (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			   bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      /* Stop at the last field of this variable.  */
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1046
1047 /* Insert constraint C into the list of complex constraints for graph
1048 node VAR. */
1049
1050 static void
1051 insert_into_complex (constraint_graph_t graph,
1052 unsigned int var, constraint_t c)
1053 {
1054 vec<constraint_t> complex = graph->complex[var];
1055 unsigned int place = complex.lower_bound (c, constraint_less);
1056
1057 /* Only insert constraints that do not already exist. */
1058 if (place >= complex.length ()
1059 || !constraint_equal (*c, *complex[place]))
1060 graph->complex[var].safe_insert (place, c);
1061 }
1062
1063
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  /* FROM must already have TO as its union-find representative.  */
  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      /* Rewrite the constraint in place to refer to the representative
	 node TO instead of FROM.  */
      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  /* Union the rewritten constraints into TO's list; constraint_set_union
     deduplicates and reports whether TO gained any constraint.  */
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}
1098
1099
1100 /* Remove edges involving NODE from GRAPH. */
1101
1102 static void
1103 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1104 {
1105 if (graph->succs[node])
1106 BITMAP_FREE (graph->succs[node]);
1107 }
1108
/* Merge GRAPH nodes FROM and TO into node TO.  Transfers indirect-cycle
   info and successor edges, then drops FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM is dead after the merge; release its edge bitmap.  */
  clear_edges_for_node (graph, from);
}
1138
1139
1140 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1141 it doesn't exist in the graph already. */
1142
1143 static void
1144 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1145 unsigned int from)
1146 {
1147 if (to == from)
1148 return;
1149
1150 if (!graph->implicit_preds[to])
1151 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1152
1153 if (bitmap_set_bit (graph->implicit_preds[to], from))
1154 stats.num_implicit_edges++;
1155 }
1156
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Unlike add_graph_edge this
   does not report whether the edge was new (the function returns
   void; the old "Return false ..." comment was stale).  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1169
1170 /* Add a graph edge to GRAPH, going from FROM to TO if
1171 it doesn't exist in the graph already.
1172 Return false if the edge already existed, true otherwise. */
1173
1174 static bool
1175 add_graph_edge (constraint_graph_t graph, unsigned int to,
1176 unsigned int from)
1177 {
1178 if (to == from)
1179 {
1180 return false;
1181 }
1182 else
1183 {
1184 bool r = false;
1185
1186 if (!graph->succs[from])
1187 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1188 if (bitmap_set_bit (graph->succs[from], to))
1189 {
1190 r = true;
1191 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1192 stats.num_edges++;
1193 }
1194 return r;
1195 }
1196 }
1197
1198
/* Initialize the constraint graph structure to contain SIZE nodes.
   Allocates the per-node arrays and sets every node to be its own
   representative with no known cycles or pointer equivalences.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  /* Each node starts as its own representative; -1 marks "no
     pointer-equivalence rep" and "no indirect cycle" respectively.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
1225
/* Build the constraint graph, adding only predecessor edges right now.
   This graph (with explicit and implicit predecessor edges plus the
   direct-node bitmap) is the input for offline variable substitution.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes (node 0 and
     the special vars are excluded).  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  Modeled with a REF node for x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* Offsetted or non-scalar derefs cannot be represented
	       exactly, so x is no longer a direct node.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Taking the address of one field addresses the whole
		 variable; clear direct-ness for every field.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies destroy direct-ness of the offsetted
	     side since field boundaries are not tracked here.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1328
/* Build the constraint graph, adding successor edges.  Run after
   offline variable substitution, so all variables are mapped through
   find () to their representatives first.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by the rewriting phase.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  Seed the solution directly rather than adding
	     an edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1388
1389
/* Bitmap of variables whose points-to solution changed on the last
   solver iteration; used as the propagation worklist.  */
static bitmap changed;
1392
/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already visited by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully determined.  */
  auto_sbitmap deleted;
  /* Per-node DFS discovery number.  */
  unsigned int *dfs;
  /* Mapping from node to its SCC representative (condense_visit).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of nodes not yet assigned to a finished SCC.  */
  auto_vec<unsigned> scc_stack;
};
1408
1409
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number upwards.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of an SCC; pop its members off the stack.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes cannot be unified directly; record the
		     cycle so it can be collapsed lazily at solve time.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
1497
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative of FROM.  Merges edges, complex
   constraints and solutions, and frees FROM's solution bitmaps.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      /* TO gained constraints, so it must be reprocessed.  */
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's oldsolution is stale once solving has started, since the
	 merged solution must be re-propagated in full.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove the self edge a cycle collapse may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1553
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1564
1565
1566 /* Initialize and return a topological info structure. */
1567
1568 static struct topo_info *
1569 init_topo_info (void)
1570 {
1571 size_t size = graph->size;
1572 struct topo_info *ti = XNEW (struct topo_info);
1573 ti->visited = sbitmap_alloc (size);
1574 bitmap_clear (ti->visited);
1575 ti->topo_order.create (1);
1576 return ti;
1577 }
1578
1579
1580 /* Free the topological sort info pointed to by TI. */
1581
1582 static void
1583 free_topo_info (struct topo_info *ti)
1584 {
1585 sbitmap_free (ti->visited);
1586 ti->topo_order.release ();
1587 free (ti);
1588 }
1589
/* Visit the graph in topological order starting at node N, and store
   the order in the topo_info structure TI (post-order, i.e. the stored
   order is the reverse topological order).  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  /* Recurse into all unvisited successors before emitting N.  */
  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  ti->topo_order.safe_push (n);
}
1611
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  The expansion of DELTA for an unknown offset
   is cached in *EXPANDED_DELTA.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* An offset before the variable start falls back to the
	     first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1701
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  The expansion of DELTA for an
   unknown offset is cached in *EXPANDED_DELTA.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* An offset before the variable start falls back to the
	     first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1801
1802 /* Handle a non-simple (simple meaning requires no iteration),
1803 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1804
1805 static void
1806 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1807 bitmap *expanded_delta)
1808 {
1809 if (c->lhs.type == DEREF)
1810 {
1811 if (c->rhs.type == ADDRESSOF)
1812 {
1813 gcc_unreachable ();
1814 }
1815 else
1816 {
1817 /* *x = y */
1818 do_ds_constraint (c, delta, expanded_delta);
1819 }
1820 }
1821 else if (c->rhs.type == DEREF)
1822 {
1823 /* x = *y */
1824 if (!(get_varinfo (c->lhs.var)->is_special_var))
1825 do_sd_constraint (graph, c, delta, expanded_delta);
1826 }
1827 else
1828 {
1829 bitmap tmp;
1830 bool flag = false;
1831
1832 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1833 && c->rhs.offset != 0 && c->lhs.offset == 0);
1834 tmp = get_varinfo (c->lhs.var)->solution;
1835
1836 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1837 expanded_delta);
1838
1839 if (flag)
1840 bitmap_set_bit (changed, c->lhs.var);
1841 }
1842 }
1843
1844 /* Initialize and return a new SCC info structure. */
1845
1846 scc_info::scc_info (size_t size) :
1847 visited (size), deleted (size), current_index (0), scc_stack (1)
1848 {
1849 bitmap_clear (visited);
1850 bitmap_clear (deleted);
1851 node_mapping = XNEWVEC (unsigned int, size);
1852 dfs = XCNEWVEC (unsigned int, size);
1853
1854 for (size_t i = 0; i < size; i++)
1855 node_mapping[i] = i;
1856 }
1857
1858 /* Free an SCC info structure pointed to by SI */
1859
1860 scc_info::~scc_info ()
1861 {
1862 free (node_mapping);
1863 free (dfs);
1864 }
1865
1866
1867 /* Find indirect cycles in GRAPH that occur, using strongly connected
1868 components, and note them in the indirect cycles map.
1869
1870 This technique comes from Ben Hardekopf and Calvin Lin,
1871 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1872 Lines of Code", submitted to PLDI 2007. */
1873
1874 static void
1875 find_indirect_cycles (constraint_graph_t graph)
1876 {
1877 unsigned int i;
1878 unsigned int size = graph->size;
1879 scc_info si (size);
1880
1881 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1882 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1883 scc_visit (graph, &si, i);
1884 }
1885
1886 /* Compute a topological ordering for GRAPH, and store the result in the
1887 topo_info structure TI. */
1888
1889 static void
1890 compute_topo_order (constraint_graph_t graph,
1891 struct topo_info *ti)
1892 {
1893 unsigned int i;
1894 unsigned int size = graph->size;
1895
1896 for (i = 0; i != size; ++i)
1897 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1898 topo_visit (graph, ti, i);
1899 }
1900
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Precomputed hash of LABELS.  */
  hashval_t hashcode;
  /* The equivalence class id assigned to this label set (0 until
     one is allocated).  */
  unsigned int equivalence_class;
  /* The set of labels this entry represents.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1911
/* Equiv_class_label hashtable helpers.  Entries are obstack-allocated,
   hence the nofree base.  */

struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1920
/* Hash function for a equiv_class_label_t.  Returns the hash that was
   precomputed when the entry was created.  */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  return ecl->hashcode;
}
1928
/* Equality function for two equiv_class_label_t's.  Compares the cheap
   hash first, then the label bitmaps.  */

inline bool
equiv_class_hasher::equal (const equiv_class_label *eql1,
			   const equiv_class_label *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}
1938
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;

/* Obstack the equiv_class_label entries of both tables are allocated
   on (see equiv_class_lookup_or_add).  */
struct obstack equiv_class_obstack;
1948
/* Look up the equivalence class entry in TABLE for the bitmap of
   LABELS, creating a fresh entry (with equivalence_class zero) if none
   exists yet.  Returns the entry; callers assign an equivalence class
   id to new entries.  (The old comment mentioned a REF_LABELS output
   parameter that no longer exists.)  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (&ecl, INSERT);
  if (!*slot)
    {
      /* Not present; allocate a new entry on the shared obstack.  */
      *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1973
1974 /* Perform offline variable substitution.
1975
1976 This is a worst case quadratic time way of identifying variables
1977 that must have equivalent points-to sets, including those caused by
1978 static cycles, and single entry subgraphs, in the constraint graph.
1979
1980 The technique is described in "Exploiting Pointer and Location
1981 Equivalence to Optimize Pointer Analysis. In the 14th International
1982 Static Analysis Symposium (SAS), August 2007." It is known as the
1983 "HU" algorithm, and is equivalent to value numbering the collapsed
1984 constraint graph including evaluating unions.
1985
1986 The general method of finding equivalence classes is as follows:
1987 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1988 Initialize all non-REF nodes to be direct nodes.
1989 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1990 variable}
1991 For each constraint containing the dereference, we also do the same
1992 thing.
1993
1994 We then compute SCC's in the graph and unify nodes in the same SCC,
1995 including pts sets.
1996
1997 For each non-collapsed node x:
1998 Visit all unvisited explicit incoming edges.
1999 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2000 where y->x.
2001 Lookup the equivalence class for pts(x).
2002 If we found one, equivalence_class(x) = found class.
2003 Otherwise, equivalence_class(x) = new class, and new_class is
2004 added to the lookup table.
2005
2006 All direct nodes with the same equivalence class can be replaced
2007 with a single representative node.
2008 All unlabeled nodes (label == 0) are not pointers and all edges
2009 involving them can be eliminated.
2010 We perform these optimizations during rewrite_constraints
2011
2012 In addition to pointer equivalence class finding, we also perform
2013 location equivalence class finding. This is the set of variables
2014 that always appear together in points-to sets. We use this to
2015 compress the size of the points-to sets. */
2016
/* Current maximum pointer equivalence class id (label 0 means
   "not a pointer").  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
2022
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Uses the predecessor (and
   implicit predecessor) edges; SCC members are collapsed onto the root
   node N via SI->node_mapping and their edge bitmaps are merged.  */

static void
condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number upwards.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () != 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* Find the first node of the SCC and do non-bitmap work.  */
	  bool direct_p = true;
	  unsigned first = si->scc_stack.length ();
	  do
	    {
	      --first;
	      unsigned int w = si->scc_stack[first];
	      si->node_mapping[w] = n;
	      if (!bitmap_bit_p (graph->direct_nodes, w))
		direct_p = false;
	    }
	  while (first > 0
		 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
	  /* The collapsed node is direct only if every member was.  */
	  if (!direct_p)
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Want to reduce to node n, push that first.  */
	  si->scc_stack.reserve (1);
	  si->scc_stack.quick_push (si->scc_stack[first]);
	  si->scc_stack[first] = n;

	  /* Merge the members' edge bitmaps pairwise (tournament style)
	     to keep the bitmap unions balanced.  */
	  unsigned scc_size = si->scc_stack.length () - first;
	  unsigned split = scc_size / 2;
	  unsigned carry = scc_size - split * 2;
	  while (split > 0)
	    {
	      for (unsigned i = 0; i < split; ++i)
		{
		  unsigned a = si->scc_stack[first + i];
		  unsigned b = si->scc_stack[first + split + carry + i];

		  /* Unify our nodes.  */
		  if (graph->preds[b])
		    {
		      if (!graph->preds[a])
			std::swap (graph->preds[a], graph->preds[b]);
		      else
			bitmap_ior_into_and_free (graph->preds[a],
						  &graph->preds[b]);
		    }
		  if (graph->implicit_preds[b])
		    {
		      if (!graph->implicit_preds[a])
			std::swap (graph->implicit_preds[a],
				   graph->implicit_preds[b]);
		      else
			bitmap_ior_into_and_free (graph->implicit_preds[a],
						  &graph->implicit_preds[b]);
		    }
		  if (graph->points_to[b])
		    {
		      if (!graph->points_to[a])
			std::swap (graph->points_to[a], graph->points_to[b]);
		      else
			bitmap_ior_into_and_free (graph->points_to[a],
						  &graph->points_to[b]);
		    }
		}
	      unsigned remain = split + carry;
	      split = remain / 2;
	      carry = remain - split * 2;
	    }
	  /* Actually pop the SCC.  */
	  si->scc_stack.truncate (first);
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2148
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.
     FIRST_PRED remembers the single non-empty predecessor seen so far;
     allocating a bitmap for N is delayed until a second one shows up,
     so nodes with exactly one non-empty predecessor can share its
     points-to set (see the tail of this function).  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      /* Depth-first: make sure the predecessor is labeled first.  */
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges: self edges and edges from non-pointer
	 (label 0) nodes contribute nothing.  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  /* Second contributor: now N needs its own bitmap.  */
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class;
     the FIRST_REF_NODE + n bit makes their hash value unique.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same, so share both label and points-to bitmap.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      /* Otherwise N has no points-to at all and keeps label 0
	 (non-pointer).  */
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      /* Look up (or create) the equivalence class for this set of
	 points-to bits; an existing entry lets us free our copy and
	 share the canonical bitmap.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2252
/* Print the pred graph in dot format, using the node mapping in SI to
   show only SCC representatives.  Output goes to FILE.  */

static void
dump_pred_graph (class scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Nodes that were collapsed
     into an SCC representative (node_mapping[i] != i) are skipped.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (si->node_mapping[i] != i)
	continue;
      /* Nodes >= FIRST_REF_NODE stand for "*var" of the corresponding
	 variable and are printed with a leading '*'.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  else
	    fprintf (file, "[label=\"*%s = {",
		     get_varinfo (i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      /* Edges are stored as predecessor bitmaps, so the arrow points
	 from the (mapped) predecessor J to I.  */
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
2327
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Returns the scc_info holding the node mapping; the caller is
   responsible for releasing it via free_var_substitution_info.  */

static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  gcc_obstack_init (&equiv_class_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Label 0 is reserved to mean "non-pointer", so real classes start
     at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* The visited bitmap is reused for the labeling walk below.  */
  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  On a hash hit our local POINTED_BY copy is no
	 longer needed and is freed.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  A pointer label of
     zero means the variable can never point to anything, so its edges
     are useless for the solver.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2461
/* Free information that was only necessary for variable
   substitution: the scc_info SI, the pred-graph-only arrays in the
   global constraint graph, both equivalence-class hash tables and
   their obstacks.  */

static void
free_var_substitution_info (class scc_info *si)
{
  delete si;
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  obstack_free (&equiv_class_obstack, NULL);
  bitmap_obstack_release (&iteration_obstack);
}
2482
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.
   As a side effect this may unify NODE with the class representative
   and record NODE as the representative for LABEL.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* First node seen with this label becomes both the full
	     (eq_rep) and pointer-equivalence (pe_rep) representative.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: only pointer-equivalent, record the label so
	 unite_pointer_equivalences can merge these later.  */
      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2522
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */

static void
unite_pointer_equivalences (constraint_graph_t graph)
{
  unsigned int i;

  /* Go through the pointer equivalences and unite them to their
     representative, if they aren't already.  Label 0 means
     "non-pointer" and is skipped.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int label = graph->pe[i];
      if (label)
	{
	  int label_rep = graph->pe_rep[label];

	  /* No representative was recorded for this label.  */
	  if (label_rep == -1)
	    continue;

	  label_rep = find (label_rep);
	  if (label_rep >= 0 && unite (label_rep, find (i)))
	    unify_nodes (graph, label_rep, i, false);
	}
    }
}
2550
2551 /* Move complex constraints to the GRAPH nodes they belong to. */
2552
2553 static void
2554 move_complex_constraints (constraint_graph_t graph)
2555 {
2556 int i;
2557 constraint_t c;
2558
2559 FOR_EACH_VEC_ELT (constraints, i, c)
2560 {
2561 if (c)
2562 {
2563 struct constraint_expr lhs = c->lhs;
2564 struct constraint_expr rhs = c->rhs;
2565
2566 if (lhs.type == DEREF)
2567 {
2568 insert_into_complex (graph, lhs.var, c);
2569 }
2570 else if (rhs.type == DEREF)
2571 {
2572 if (!(get_varinfo (lhs.var)->is_special_var))
2573 insert_into_complex (graph, rhs.var, c);
2574 }
2575 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2576 && (lhs.offset != 0 || rhs.offset != 0))
2577 {
2578 insert_into_complex (graph, rhs.var, c);
2579 }
2580 }
2581 }
2582 }
2583
2584
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.

   Constraints involving a variable with pointer label 0 (a proven
   non-pointer) are dropped by NULLing their slot in CONSTRAINTS;
   remaining constraints have their variables replaced by an
   equivalent representative node.  */

static void
rewrite_constraints (constraint_graph_t graph,
		     class scc_info *si)
{
  int i;
  constraint_t c;

  if (flag_checking)
    {
      /* Rewriting assumes no unions have happened yet: every node
	 must still be its own union-find representative.  */
      for (unsigned int j = 0; j < graph->size; j++)
	gcc_assert (find (j) == j);
    }

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Collapse each side onto an equivalent node, if any.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2654
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      auto_vec<unsigned> queue;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  Hence the two phases: first collect
	 the representatives to merge, then unify them.  */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      /* Second phase: actually merge solutions into TO.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      return true;
    }
  return false;
}
2693
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  Only nodes with
     a non-empty solution and either successors or complex constraints
     can do any work.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in (reverse-popped) topological order each
	 iteration to minimize re-propagation.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS gets only the delta against what was already
		   propagated last time.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember what we have now propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  TO_REMOVE
		     holds a stale successor bit (one merged into its
		     representative) to be cleared on the next safe
		     point; it cannot be cleared while iterating.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      bitmap tmp = get_varinfo (to)->solution;
		      bool flag = false;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2869
/* Map from trees to variable infos.  */
static hash_map<tree, varinfo_t> *vi_for_tree;


/* Insert VI as the variable info for tree T in the vi_for_tree map.
   T must not already have an entry.  */

static void
insert_vi_for_tree (tree t, varinfo_t vi)
{
  gcc_assert (vi);
  /* Note: the put must always execute; gcc_assert evaluates its
     argument unconditionally.  put returning true would mean a
     duplicate insertion.  */
  gcc_assert (!vi_for_tree->put (t, vi));
}
2882
2883 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2884 exist in the map, return NULL, otherwise, return the varinfo we found. */
2885
2886 static varinfo_t
2887 lookup_vi_for_tree (tree t)
2888 {
2889 varinfo_t *slot = vi_for_tree->get (t);
2890 if (slot == NULL)
2891 return NULL;
2892
2893 return *slot;
2894 }
2895
/* Return a printable name for DECL.

   Name computation is only performed when DUMP_FILE is set, since the
   names are purely for dump output; otherwise the static fallback
   "NULL" is returned.  The result is either a static string, an
   IDENTIFIER pointer, or GC-allocated (ggc_strdup) — callers must not
   free it.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = "NULL";
  if (dump_file)
    {
      char *temp = NULL;
      if (TREE_CODE (decl) == SSA_NAME)
	{
	  /* SSA names are printed as base-name_version.  */
	  res = get_name (decl);
	  temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
	}
      else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
	       && DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
      else if (DECL_P (decl))
	{
	  res = get_name (decl);
	  /* Anonymous decls fall back to their UID, like the
	     pretty-printer's D.xxxx notation.  */
	  if (!res)
	    temp = xasprintf ("D.%u", DECL_UID (decl));
	}

      /* Move any heap-allocated name to GC memory so callers need not
	 manage it.  */
      if (temp)
	{
	  res = ggc_strdup (temp);
	  free (temp);
	}
    }

  return res;
}
2929
2930 /* Find the variable id for tree T in the map.
2931 If T doesn't exist in the map, create an entry for it and return it. */
2932
2933 static varinfo_t
2934 get_vi_for_tree (tree t)
2935 {
2936 varinfo_t *slot = vi_for_tree->get (t);
2937 if (slot == NULL)
2938 {
2939 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2940 return get_varinfo (id);
2941 }
2942
2943 return *slot;
2944 }
2945
/* Get a scalar constraint expression for a new temporary variable
   named NAME.  If ADD_ID, append a unique id to the name.  The
   temporary is a full (field-insensitive) register variable.  */

static struct constraint_expr
new_scalar_tmp_constraint_exp (const char *name, bool add_id)
{
  struct constraint_expr tmp;
  varinfo_t vi;

  vi = new_var_info (NULL_TREE, name, add_id);
  vi->offset = 0;
  /* Size -1 / fullsize -1 mark the temporary as covering "everything";
     it is never subdivided into fields.  */
  vi->size = -1;
  vi->fullsize = -1;
  vi->is_full_var = 1;
  vi->is_reg_var = 1;

  tmp.var = vi->id;
  tmp.type = SCALAR;
  tmp.offset = 0;

  return tmp;
}
2967
/* Get a constraint expression vector from an SSA_VAR_P node T,
   pushing the results onto RESULTS.
   If ADDRESS_P is true, the result will have its address taken.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing.  */
      else if (!ssa_defined_default_def_p (t))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this is decl may bind to NULL note that.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Walk the field chain starting at VI; each field is a separate
	 constraint expression.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
3051
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  Constraints that
   cannot be represented directly are split into two simpler
   constraints connected through a fresh temporary.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* After the splits above only offset-0 ADDRESSOF can remain.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
3107
3108
3109 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3110 structure. */
3111
3112 static HOST_WIDE_INT
3113 bitpos_of_field (const tree fdecl)
3114 {
3115 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3116 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3117 return -1;
3118
3119 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3120 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3121 }
3122
3123
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET may be
   NULL_TREE or non-constant, in which case the unknown-offset
   conservative fallback is used.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  The unsigned multiply
	     avoids signed-overflow UB; the division check detects
	     wraparound.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the whole field chain of the variable, starting at
	     its head, and add every field except the one already
	     present.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* Scalars carry the offset symbolically; it is resolved
	     during solving.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3242
3243
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If ADDRESS_P is true the result will have its address taken.
   If LHS_P is true then the constraint expression is assumed to be used
   as the lhs.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  poly_int64 bitsize = -1;
  poly_int64 bitmaxsize = -1;
  poly_int64 bitpos;
  bool reverse;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);

  /* We can end up here for component references on a
     VIEW_CONVERT_EXPR <>(&foobar) or things like a
     BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>.  So for
     symbolic constants simply give up.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      constraint_expr result;
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
      results->safe_push (result);
      return;
    }

  /* Avoid creating pointer-offset constraints, so handle MEM_REF
     offsets directly.  Pretend to take the address of the base,
     we'll take care of adding the required subset of sub-fields below.  */
  if (TREE_CODE (t) == MEM_REF
      && !integer_zerop (TREE_OPERAND (t, 0)))
    {
      /* Fold the MEM_REF offset into BITPOS; on overflow fall back to
	 an unknown extent starting at offset zero.  */
      poly_offset_int off = mem_ref_offset (t);
      off <<= LOG2_BITS_PER_UNIT;
      off += bitpos;
      poly_int64 off_hwi;
      if (off.to_shwi (&off_hwi))
	bitpos = off_hwi;
      else
	{
	  bitpos = 0;
	  bitmaxsize = -1;
	}
      get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
      do_deref (results);
    }
  else
    get_constraint_for_1 (t, results, true, lhs_p);

  /* Strip off nothing_id.  */
  if (results->length () == 2)
    {
      gcc_assert ((*results)[0].var == nothing_id);
      results->unordered_remove (0);
    }
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
	  && maybe_ne (bitmaxsize, 0))
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	    {
	      if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
					  curr->size, bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  /* When taking the address a single overlapping field
		     suffices; reads/writes collect all of them.  */
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (known_eq (bitmaxsize, 0))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable, "
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      HOST_WIDE_INT const_bitpos;
      if (!bitpos.is_constant (&const_bitpos)
	  || const_bitpos == -1
	  || maybe_ne (bitsize, bitmaxsize)
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += const_bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on constants like
	 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i].  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3421
3422
3423 /* Dereference the constraint expression CONS, and return the result.
3424 DEREF (ADDRESSOF) = SCALAR
3425 DEREF (SCALAR) = DEREF
3426 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3427 This is needed so that we can handle dereferencing DEREF constraints. */
3428
3429 static void
3430 do_deref (vec<ce_s> *constraints)
3431 {
3432 struct constraint_expr *c;
3433 unsigned int i = 0;
3434
3435 FOR_EACH_VEC_ELT (*constraints, i, c)
3436 {
3437 if (c->type == SCALAR)
3438 c->type = DEREF;
3439 else if (c->type == ADDRESSOF)
3440 c->type = SCALAR;
3441 else if (c->type == DEREF)
3442 {
3443 struct constraint_expr tmplhs;
3444 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3445 process_constraint (new_constraint (tmplhs, *c));
3446 c->var = tmplhs.var;
3447 }
3448 else
3449 gcc_unreachable ();
3450 }
3451 }
3452
3453 /* Given a tree T, return the constraint expression for taking the
3454 address of it. */
3455
3456 static void
3457 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3458 {
3459 struct constraint_expr *c;
3460 unsigned int i;
3461
3462 get_constraint_for_1 (t, results, true, true);
3463
3464 FOR_EACH_VEC_ELT (*results, i, c)
3465 {
3466 if (c->type == DEREF)
3467 c->type = SCALAR;
3468 else
3469 c->type = ADDRESSOF;
3470 }
3471 }
3472
/* Given a tree T, return the constraint expression for it.  ADDRESS_P
   is true when the reference will have its address taken; LHS_P is
   true when the expression is used as a store destination.  The
   resulting expressions are appended to *RESULTS.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class of T.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a SCALAR result on a field-decomposed variable add
		 all subfields the access of size TYPE_SIZE may touch.  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3660
3661 /* Given a gimple tree T, return the constraint expression vector for it. */
3662
3663 static void
3664 get_constraint_for (tree t, vec<ce_s> *results)
3665 {
3666 gcc_assert (results->length () == 0);
3667
3668 get_constraint_for_1 (t, results, false, true);
3669 }
3670
3671 /* Given a gimple tree T, return the constraint expression vector for it
3672 to be used as the rhs of a constraint. */
3673
3674 static void
3675 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3676 {
3677 gcc_assert (results->length () == 0);
3678
3679 get_constraint_for_1 (t, results, false, false);
3680 }
3681
3682
3683 /* Efficiently generates constraints from all entries in *RHSC to all
3684 entries in *LHSC. */
3685
3686 static void
3687 process_all_all_constraints (vec<ce_s> lhsc,
3688 vec<ce_s> rhsc)
3689 {
3690 struct constraint_expr *lhsp, *rhsp;
3691 unsigned i, j;
3692
3693 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3694 {
3695 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3696 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3697 process_constraint (new_constraint (*lhsp, *rhsp));
3698 }
3699 else
3700 {
3701 struct constraint_expr tmp;
3702 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3703 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3704 process_constraint (new_constraint (tmp, *rhsp));
3705 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3706 process_constraint (new_constraint (*lhsp, tmp));
3707 }
3708 }
3709
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is indirect (or the destination is unknown) we
     cannot match up fields, so use the conservative all-to-all copy.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk the lhs fields with index J, advancing the rhs field
	 index K in lockstep, and emit a copy for each pair of fields
	 whose extents overlap.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance whichever side ends first (relative to the access
	     base offsets).  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3784
3785 /* Create constraints ID = { rhsc }. */
3786
3787 static void
3788 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3789 {
3790 struct constraint_expr *c;
3791 struct constraint_expr includes;
3792 unsigned int j;
3793
3794 includes.var = id;
3795 includes.offset = 0;
3796 includes.type = SCALAR;
3797
3798 FOR_EACH_VEC_ELT (rhsc, j, c)
3799 process_constraint (new_constraint (includes, *c));
3800 }
3801
3802 /* Create a constraint ID = OP. */
3803
3804 static void
3805 make_constraint_to (unsigned id, tree op)
3806 {
3807 auto_vec<ce_s> rhsc;
3808 get_constraint_for_rhs (op, &rhsc);
3809 make_constraints_to (id, rhsc);
3810 }
3811
3812 /* Create a constraint ID = &FROM. */
3813
3814 static void
3815 make_constraint_from (varinfo_t vi, int from)
3816 {
3817 struct constraint_expr lhs, rhs;
3818
3819 lhs.var = vi->id;
3820 lhs.offset = 0;
3821 lhs.type = SCALAR;
3822
3823 rhs.var = from;
3824 rhs.offset = 0;
3825 rhs.type = ADDRESSOF;
3826 process_constraint (new_constraint (lhs, rhs));
3827 }
3828
3829 /* Create a constraint ID = FROM. */
3830
3831 static void
3832 make_copy_constraint (varinfo_t vi, int from)
3833 {
3834 struct constraint_expr lhs, rhs;
3835
3836 lhs.var = vi->id;
3837 lhs.offset = 0;
3838 lhs.type = SCALAR;
3839
3840 rhs.var = from;
3841 rhs.offset = 0;
3842 rhs.type = SCALAR;
3843 process_constraint (new_constraint (lhs, rhs));
3844 }
3845
3846 /* Make constraints necessary to make OP escape. */
3847
3848 static void
3849 make_escape_constraint (tree op)
3850 {
3851 make_constraint_to (escaped_id, op);
3852 }
3853
3854 /* Add constraints to that the solution of VI is transitively closed. */
3855
3856 static void
3857 make_transitive_closure_constraints (varinfo_t vi)
3858 {
3859 struct constraint_expr lhs, rhs;
3860
3861 /* VAR = *(VAR + UNKNOWN); */
3862 lhs.type = SCALAR;
3863 lhs.var = vi->id;
3864 lhs.offset = 0;
3865 rhs.type = DEREF;
3866 rhs.var = vi->id;
3867 rhs.offset = UNKNOWN_OFFSET;
3868 process_constraint (new_constraint (lhs, rhs));
3869 }
3870
3871 /* Add constraints to that the solution of VI has all subvariables added. */
3872
3873 static void
3874 make_any_offset_constraints (varinfo_t vi)
3875 {
3876 struct constraint_expr lhs, rhs;
3877
3878 /* VAR = VAR + UNKNOWN; */
3879 lhs.type = SCALAR;
3880 lhs.var = vi->id;
3881 lhs.offset = 0;
3882 rhs.type = SCALAR;
3883 rhs.var = vi->id;
3884 rhs.offset = UNKNOWN_OFFSET;
3885 process_constraint (new_constraint (lhs, rhs));
3886 }
3887
/* Temporary storage for fake var decls; build_fake_var_decl below
   allocates its VAR_DECLs from this obstack.  */
struct obstack fake_var_decl_obstack;
3890
/* Build a fake VAR_DECL of type TYPE acting as referrer to a DECL_UID.
   The decl is obstack-allocated (not GC-allocated) and gets a points-to
   UID of -1 so it is treated specially by the PT machinery.  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  /* Set the tree code first: the accessor macros below check it.  */
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3905
3906 /* Create a new artificial heap variable with NAME.
3907 Return the created variable. */
3908
3909 static varinfo_t
3910 make_heapvar (const char *name, bool add_id)
3911 {
3912 varinfo_t vi;
3913 tree heapvar;
3914
3915 heapvar = build_fake_var_decl (ptr_type_node);
3916 DECL_EXTERNAL (heapvar) = 1;
3917
3918 vi = new_var_info (heapvar, name, add_id);
3919 vi->is_heap_var = true;
3920 vi->is_unknown_size_var = true;
3921 vi->offset = 0;
3922 vi->fullsize = ~0;
3923 vi->size = ~0;
3924 vi->is_full_var = true;
3925 insert_vi_for_tree (heapvar, vi);
3926
3927 return vi;
3928 }
3929
3930 /* Create a new artificial heap variable with NAME and make a
3931 constraint from it to LHS. Set flags according to a tag used
3932 for tracking restrict pointers. */
3933
3934 static varinfo_t
3935 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3936 {
3937 varinfo_t vi = make_heapvar (name, add_id);
3938 vi->is_restrict_var = 1;
3939 vi->is_global_var = 1;
3940 vi->may_have_pointers = 1;
3941 make_constraint_from (lhs, vi->id);
3942 return vi;
3943 }
3944
3945 /* Create a new artificial heap variable with NAME and make a
3946 constraint from it to LHS. Set flags according to a tag used
3947 for tracking restrict pointers and make the artificial heap
3948 point to global memory. */
3949
3950 static varinfo_t
3951 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3952 bool add_id)
3953 {
3954 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3955 make_copy_constraint (vi, nonlocal_id);
3956 return vi;
3957 }
3958
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3967
3968 /* Get a constraint for the requested part of a function designator FI
3969 when operating in IPA mode. */
3970
3971 static struct constraint_expr
3972 get_function_part_constraint (varinfo_t fi, unsigned part)
3973 {
3974 struct constraint_expr c;
3975
3976 gcc_assert (in_ipa_mode);
3977
3978 if (fi->id == anything_id)
3979 {
3980 /* ??? We probably should have a ANYFN special variable. */
3981 c.var = anything_id;
3982 c.offset = 0;
3983 c.type = SCALAR;
3984 }
3985 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
3986 {
3987 varinfo_t ai = first_vi_for_offset (fi, part);
3988 if (ai)
3989 c.var = ai->id;
3990 else
3991 c.var = anything_id;
3992 c.offset = 0;
3993 c.type = SCALAR;
3994 }
3995 else
3996 {
3997 c.var = fi->id;
3998 c.offset = part;
3999 c.type = DEREF;
4000 }
4001
4002 return c;
4003 }
4004
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS of STMT.  Pushes onto *RESULTS the constraint expressions
   describing what the call may return.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Set when any argument feeds the call-use solution, in which case
     the call may also return pointers into that solution.  */
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  /* Not clobbered and not escaping: only the uses solution
	     receives what ARG points to.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Not escaping but possibly clobbered: feed both the uses
	     and the clobbers solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = UNKNOWN_OFFSET;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      auto_vec<ce_s> tmpc;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4105
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  FLAGS are the
   ERF_* return flags of the call, RHSC the precomputed rhs
   constraints and FNDECL the callee decl if known.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* Malloc-like call: the result points to fresh heap storage.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
4169
/* For non-IPA mode, generate constraints necessary for a call of a
   const function that returns a pointer in the statement STMT.
   Pushes onto *RESULTS the possible return value constraints.  */

static void
handle_const_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned int k;
  /* Set when the call-use solution receives anything and thus may be
     returned.  */
  bool need_uses = false;

  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
  if (gimple_call_chain (stmt))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      need_uses = true;
    }

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
      need_uses = true;
    }

  if (need_uses)
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_any_offset_constraints (uses);
      make_transitive_closure_constraints (uses);
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* May return offsetted arguments.  */
  varinfo_t tem = NULL;
  if (gimple_call_num_args (stmt) != 0)
    {
      tem = new_var_info (NULL_TREE, "callarg", true);
      tem->is_reg_var = true;
    }
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
      auto_vec<ce_s> argc;
      get_constraint_for_rhs (arg, &argc);
      make_constraints_to (tem->id, argc);
    }
  if (tem)
    {
      /* Return TEM at an unknown offset to cover pointer arithmetic
	 on the arguments.  */
      ce_s ce;
      ce.type = SCALAR;
      ce.var = tem->id;
      ce.offset = UNKNOWN_OFFSET;
      results->safe_push (ce);
    }

  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  results->safe_push (rhsc);
}
4241
/* For non-IPA mode, generate constraints necessary for a call to a
   pure function in statement STMT.  Pushes onto *RESULTS the possible
   return value constraints.  */

static void
handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Lazily-created call-use varinfo; NULL until some input feeds it.  */
  varinfo_t uses = NULL;

  /* Memory reached from pointer arguments is call-used.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, arg);
    }

  /* The static chain is used as well.  */
  if (gimple_call_chain (stmt))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
    }

  /* And if we applied NRV the address of the return slot.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
    }

  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
    {
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4306
4307
4308 /* Return the varinfo for the callee of CALL. */
4309
4310 static varinfo_t
4311 get_fi_for_callee (gcall *call)
4312 {
4313 tree decl, fn = gimple_call_fn (call);
4314
4315 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4316 fn = OBJ_TYPE_REF_EXPR (fn);
4317
4318 /* If we can directly resolve the function being called, do so.
4319 Otherwise, it must be some sort of indirect expression that
4320 we should still be able to handle. */
4321 decl = gimple_call_addr_fndecl (fn);
4322 if (decl)
4323 return get_vi_for_tree (decl);
4324
4325 /* If the function is anything other than a SSA name pointer we have no
4326 clue and should be getting ANYFN (well, ANYTHING for now). */
4327 if (!fn || TREE_CODE (fn) != SSA_NAME)
4328 return get_varinfo (anything_id);
4329
4330 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4331 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4332 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4333 fn = SSA_NAME_VAR (fn);
4334
4335 return get_vi_for_tree (fn);
4336 }
4337
4338 /* Create constraints for assigning call argument ARG to the incoming parameter
4339 INDEX of function FI. */
4340
4341 static void
4342 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4343 {
4344 struct constraint_expr lhs;
4345 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4346
4347 auto_vec<ce_s, 2> rhsc;
4348 get_constraint_for_rhs (arg, &rhsc);
4349
4350 unsigned j;
4351 struct constraint_expr *rhsp;
4352 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4353 process_constraint (new_constraint (lhs, *rhsp));
4354 }
4355
4356 /* Return true if FNDECL may be part of another lto partition. */
4357
4358 static bool
4359 fndecl_maybe_in_other_partition (tree fndecl)
4360 {
4361 cgraph_node *fn_node = cgraph_node::get (fndecl);
4362 if (fn_node == NULL)
4363 return true;
4364
4365 return fn_node->in_other_partition;
4366 }
4367
/* Create constraints for the builtin call T.  Return true if the call
   was handled, otherwise false, in which case the caller falls back
   to the generic call handling.  FN is the function containing T and
   is only consulted for builtins that need per-function information
   (va_start, __builtin_return, GOMP/GOACC outlined calls).  */

static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy takes (src, dest); everything else (dest, src).  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The mempcpy/stpcpy/stpncpy variants return a pointer
		 into DEST at some offset, hence the unknown-offset
		 constraint; the rest return DEST itself.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	      rhsc.truncate (0);
	    }
	  /* Model the copy itself as *(DEST + x) = *(SRC + x).  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  /* The result, if any, points to DEST.  */
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset with zero makes the destination point to NOTHING
	     (valid when NULL-pointer dereferences are assumed to
	     trap), otherwise to some integer value.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens.  */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (ptr, &lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* Alloca storage is never global.  To exempt it from escaped
	     handling make it a non-heap var.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores the allocated pointer through its
	     first argument, hence the deref on the LHS.  */
	  tree ptrptr = gimple_call_arg (t, 0);
	  get_constraint_for (ptrptr, &lhsc);
	  do_deref (&lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result is the first argument, pass-through.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (t))
	  {
	    /* These return fresh (NOALIAS) storage whose contents
	       derive from the first argument's pointed-to memory.  */
	    handle_lhs_call (t, gimple_call_lhs (t),
			     gimple_call_return_flags (t) | ERF_NOALIAS,
			     vNULL, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (0);
	    rhsc.truncate (0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (gimple_call_lhs (t), &lhsc);
		get_constraint_for (gimple_call_arg (t, 0), &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (t))
	  {
	    tree src = gimple_call_arg (t, 0);
	    get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	    /* Also include the possible NULL result.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (nul);
	    get_constraint_for (gimple_call_lhs (t), &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  /* Note: no transitive-closure constraints here, matching the
	     "not transitively" in the comment above.  */
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  make_constraint_to (uses->id, gimple_call_arg (t, 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function receives FRAME as its static
		     chain.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      /* The result points to what the trampoline points to,
	         i.e. the nested function stored by INIT_TRAMPOLINE.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A TM store is *ADDR = SRC.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A TM load is DEST = *ADDR.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (fn->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (fn->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  /* NOTE(review): this condition is always true when reached -
	     the first IF only falls through with in_ipa_mode set and
	     FI assigned non-NULL.  */
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
		     sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      /* Shadows the outer FNDECL; this is the outlined function
		 passed to the runtime, not the builtin itself.  */
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      tree arg = gimple_call_arg (t, argpos);

	      /* The data argument becomes parameter 0 of the outlined
		 function.  */
	      varinfo_t fi = get_vi_for_tree (fndecl);
	      find_func_aliases_for_call_arg (fi, 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4827
/* Create constraints for the call T.  Builtins get a chance to be
   handled specially first; otherwise the call is modeled either
   conservatively through its ECF flags (non-IPA mode, or known decls
   without function info) or precisely by wiring arguments, result and
   static chain to the callee's function-info variables (IPA mode).
   FN is the function containing T.  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;

  if (fndecl != NULL_TREE
      && fndecl_built_in_p (fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      /* Conservative path: classify the call by its flags.  */
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t),
			 gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA path: connect the call site to FI's parameter, result and
	 static-chain sub-variables.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  tree arg = gimple_call_arg (t, j);
	  find_func_aliases_for_call_arg (fi, j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* For a memory-returned value the result variable holds a
	     pointer to the slot, so dereference it.  */
	  if (aggr_p)
	    {
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (rhs);
	      do_deref (&tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4929
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  FN is the function T belongs to.  Handles PHI
   nodes, calls, assignments, returns and asms; anything else adds no
   constraints.  (An earlier version took an auxiliary alias-info
   argument AI; the old comment referring to it was stale.)  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
	{
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  rhsc.truncate (0);
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (fn, as_a <gcall *> (t));

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only set for single-rhs assignments.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == POINTER_DIFF_EXPR)
	    /* The result is not a pointer (part).  */
	    ;
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if (code == TRUNC_DIV_EXPR
		   || code == CEIL_DIV_EXPR
		   || code == FLOOR_DIV_EXPR
		   || code == ROUND_DIV_EXPR
		   || code == EXACT_DIV_EXPR
		   || code == TRUNC_MOD_EXPR
		   || code == CEIL_MOD_EXPR
		   || code == FLOOR_MOD_EXPR
		   || code == ROUND_MOD_EXPR)
	    /* Division and modulo transfer the pointer from the LHS.  */
	    get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Conversions that do not create a pointer out of a
	       non-pointer, and plain copies, are pass-through.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (rhsop);
	}
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (t);
      fi = NULL;
      /* The empty statement below is intentional; simple SSA-var
	 returns are noted here but still fall through to the
	 function-info handling that follows.  */
      if (!in_ipa_mode
	  && SSA_VAR_P (gimple_return_retval (return_stmt)))
	{
	  /* We handle simple returns by post-processing the solutions.  */
	  ;
	}
      if (!(fi = get_vi_for_tree (fn->decl)))
	make_escape_constraint (gimple_return_retval (return_stmt));
      else if (in_ipa_mode)
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5146
5147
5148 /* Create a constraint adding to the clobber set of FI the memory
5149 pointed to by PTR. */
5150
5151 static void
5152 process_ipa_clobber (varinfo_t fi, tree ptr)
5153 {
5154 vec<ce_s> ptrc = vNULL;
5155 struct constraint_expr *c, lhs;
5156 unsigned i;
5157 get_constraint_for_rhs (ptr, &ptrc);
5158 lhs = get_function_part_constraint (fi, fi_clobbers);
5159 FOR_EACH_VEC_ELT (ptrc, i, c)
5160 process_constraint (new_constraint (lhs, *c));
5161 ptrc.release ();
5162 }
5163
5164 /* Walk statement T setting up clobber and use constraints according to the
5165 references found in T. This function is a main part of the
5166 IPA constraint builder. */
5167
5168 static void
5169 find_func_clobbers (struct function *fn, gimple *origt)
5170 {
5171 gimple *t = origt;
5172 auto_vec<ce_s, 16> lhsc;
5173 auto_vec<ce_s, 16> rhsc;
5174 varinfo_t fi;
5175
5176 /* Add constraints for clobbered/used in IPA mode.
5177 We are not interested in what automatic variables are clobbered
5178 or used as we only use the information in the caller to which
5179 they do not escape. */
5180 gcc_assert (in_ipa_mode);
5181
5182 /* If the stmt refers to memory in any way it better had a VUSE. */
5183 if (gimple_vuse (t) == NULL_TREE)
5184 return;
5185
5186 /* We'd better have function information for the current function. */
5187 fi = lookup_vi_for_tree (fn->decl);
5188 gcc_assert (fi != NULL);
5189
5190 /* Account for stores in assignments and calls. */
5191 if (gimple_vdef (t) != NULL_TREE
5192 && gimple_has_lhs (t))
5193 {
5194 tree lhs = gimple_get_lhs (t);
5195 tree tem = lhs;
5196 while (handled_component_p (tem))
5197 tem = TREE_OPERAND (tem, 0);
5198 if ((DECL_P (tem)
5199 && !auto_var_in_fn_p (tem, fn->decl))
5200 || INDIRECT_REF_P (tem)
5201 || (TREE_CODE (tem) == MEM_REF
5202 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5203 && auto_var_in_fn_p
5204 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5205 {
5206 struct constraint_expr lhsc, *rhsp;
5207 unsigned i;
5208 lhsc = get_function_part_constraint (fi, fi_clobbers);
5209 get_constraint_for_address_of (lhs, &rhsc);
5210 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5211 process_constraint (new_constraint (lhsc, *rhsp));
5212 rhsc.truncate (0);
5213 }
5214 }
5215
5216 /* Account for uses in assigments and returns. */
5217 if (gimple_assign_single_p (t)
5218 || (gimple_code (t) == GIMPLE_RETURN
5219 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5220 {
5221 tree rhs = (gimple_assign_single_p (t)
5222 ? gimple_assign_rhs1 (t)
5223 : gimple_return_retval (as_a <greturn *> (t)));
5224 tree tem = rhs;
5225 while (handled_component_p (tem))
5226 tem = TREE_OPERAND (tem, 0);
5227 if ((DECL_P (tem)
5228 && !auto_var_in_fn_p (tem, fn->decl))
5229 || INDIRECT_REF_P (tem)
5230 || (TREE_CODE (tem) == MEM_REF
5231 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5232 && auto_var_in_fn_p
5233 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5234 {
5235 struct constraint_expr lhs, *rhsp;
5236 unsigned i;
5237 lhs = get_function_part_constraint (fi, fi_uses);
5238 get_constraint_for_address_of (rhs, &rhsc);
5239 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5240 process_constraint (new_constraint (lhs, *rhsp));
5241 rhsc.truncate (0);
5242 }
5243 }
5244
5245 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5246 {
5247 varinfo_t cfi = NULL;
5248 tree decl = gimple_call_fndecl (t);
5249 struct constraint_expr lhs, rhs;
5250 unsigned i, j;
5251
5252 /* For builtins we do not have separate function info. For those
5253 we do not generate escapes for we have to generate clobbers/uses. */
5254 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5255 switch (DECL_FUNCTION_CODE (decl))
5256 {
5257 /* The following functions use and clobber memory pointed to
5258 by their arguments. */
5259 case BUILT_IN_STRCPY:
5260 case BUILT_IN_STRNCPY:
5261 case BUILT_IN_BCOPY:
5262 case BUILT_IN_MEMCPY:
5263 case BUILT_IN_MEMMOVE:
5264 case BUILT_IN_MEMPCPY:
5265 case BUILT_IN_STPCPY:
5266 case BUILT_IN_STPNCPY:
5267 case BUILT_IN_STRCAT:
5268 case BUILT_IN_STRNCAT:
5269 case BUILT_IN_STRCPY_CHK:
5270 case BUILT_IN_STRNCPY_CHK:
5271 case BUILT_IN_MEMCPY_CHK:
5272 case BUILT_IN_MEMMOVE_CHK:
5273 case BUILT_IN_MEMPCPY_CHK:
5274 case BUILT_IN_STPCPY_CHK:
5275 case BUILT_IN_STPNCPY_CHK:
5276 case BUILT_IN_STRCAT_CHK:
5277 case BUILT_IN_STRNCAT_CHK:
5278 {
5279 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5280 == BUILT_IN_BCOPY ? 1 : 0));
5281 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5282 == BUILT_IN_BCOPY ? 0 : 1));
5283 unsigned i;
5284 struct constraint_expr *rhsp, *lhsp;
5285 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5286 lhs = get_function_part_constraint (fi, fi_clobbers);
5287 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5288 process_constraint (new_constraint (lhs, *lhsp));
5289 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5290 lhs = get_function_part_constraint (fi, fi_uses);
5291 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5292 process_constraint (new_constraint (lhs, *rhsp));
5293 return;
5294 }
5295 /* The following function clobbers memory pointed to by
5296 its argument. */
5297 case BUILT_IN_MEMSET:
5298 case BUILT_IN_MEMSET_CHK:
5299 case BUILT_IN_POSIX_MEMALIGN:
5300 {
5301 tree dest = gimple_call_arg (t, 0);
5302 unsigned i;
5303 ce_s *lhsp;
5304 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5305 lhs = get_function_part_constraint (fi, fi_clobbers);
5306 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5307 process_constraint (new_constraint (lhs, *lhsp));
5308 return;
5309 }
5310 /* The following functions clobber their second and third
5311 arguments. */
5312 case BUILT_IN_SINCOS:
5313 case BUILT_IN_SINCOSF:
5314 case BUILT_IN_SINCOSL:
5315 {
5316 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5317 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5318 return;
5319 }
5320 /* The following functions clobber their second argument. */
5321 case BUILT_IN_FREXP:
5322 case BUILT_IN_FREXPF:
5323 case BUILT_IN_FREXPL:
5324 case BUILT_IN_LGAMMA_R:
5325 case BUILT_IN_LGAMMAF_R:
5326 case BUILT_IN_LGAMMAL_R:
5327 case BUILT_IN_GAMMA_R:
5328 case BUILT_IN_GAMMAF_R:
5329 case BUILT_IN_GAMMAL_R:
5330 case BUILT_IN_MODF:
5331 case BUILT_IN_MODFF:
5332 case BUILT_IN_MODFL:
5333 {
5334 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5335 return;
5336 }
5337 /* The following functions clobber their third argument. */
5338 case BUILT_IN_REMQUO:
5339 case BUILT_IN_REMQUOF:
5340 case BUILT_IN_REMQUOL:
5341 {
5342 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5343 return;
5344 }
5345 /* The following functions neither read nor clobber memory. */
5346 case BUILT_IN_ASSUME_ALIGNED:
5347 case BUILT_IN_FREE:
5348 return;
5349 /* Trampolines are of no interest to us. */
5350 case BUILT_IN_INIT_TRAMPOLINE:
5351 case BUILT_IN_ADJUST_TRAMPOLINE:
5352 return;
5353 case BUILT_IN_VA_START:
5354 case BUILT_IN_VA_END:
5355 return;
5356 case BUILT_IN_GOMP_PARALLEL:
5357 case BUILT_IN_GOACC_PARALLEL:
5358 {
5359 unsigned int fnpos, argpos;
5360 unsigned int implicit_use_args[2];
5361 unsigned int num_implicit_use_args = 0;
5362 switch (DECL_FUNCTION_CODE (decl))
5363 {
5364 case BUILT_IN_GOMP_PARALLEL:
5365 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5366 fnpos = 0;
5367 argpos = 1;
5368 break;
5369 case BUILT_IN_GOACC_PARALLEL:
5370 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5371 sizes, kinds, ...). */
5372 fnpos = 1;
5373 argpos = 3;
5374 implicit_use_args[num_implicit_use_args++] = 4;
5375 implicit_use_args[num_implicit_use_args++] = 5;
5376 break;
5377 default:
5378 gcc_unreachable ();
5379 }
5380
5381 tree fnarg = gimple_call_arg (t, fnpos);
5382 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5383 tree fndecl = TREE_OPERAND (fnarg, 0);
5384 if (fndecl_maybe_in_other_partition (fndecl))
5385 /* Fallthru to general call handling. */
5386 break;
5387
5388 varinfo_t cfi = get_vi_for_tree (fndecl);
5389
5390 tree arg = gimple_call_arg (t, argpos);
5391
5392 /* Parameter passed by value is used. */
5393 lhs = get_function_part_constraint (fi, fi_uses);
5394 struct constraint_expr *rhsp;
5395 get_constraint_for (arg, &rhsc);
5396 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5397 process_constraint (new_constraint (lhs, *rhsp));
5398 rhsc.truncate (0);
5399
5400 /* Handle parameters used by the call, but not used in cfi, as
5401 implicitly used by cfi. */
5402 lhs = get_function_part_constraint (cfi, fi_uses);
5403 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5404 {
5405 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5406 get_constraint_for (arg, &rhsc);
5407 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5408 process_constraint (new_constraint (lhs, *rhsp));
5409 rhsc.truncate (0);
5410 }
5411
5412 /* The caller clobbers what the callee does. */
5413 lhs = get_function_part_constraint (fi, fi_clobbers);
5414 rhs = get_function_part_constraint (cfi, fi_clobbers);
5415 process_constraint (new_constraint (lhs, rhs));
5416
5417 /* The caller uses what the callee does. */
5418 lhs = get_function_part_constraint (fi, fi_uses);
5419 rhs = get_function_part_constraint (cfi, fi_uses);
5420 process_constraint (new_constraint (lhs, rhs));
5421
5422 return;
5423 }
5424 /* printf-style functions may have hooks to set pointers to
5425 point to somewhere into the generated string. Leave them
5426 for a later exercise... */
5427 default:
5428 /* Fallthru to general call handling. */;
5429 }
5430
5431 /* Parameters passed by value are used. */
5432 lhs = get_function_part_constraint (fi, fi_uses);
5433 for (i = 0; i < gimple_call_num_args (t); i++)
5434 {
5435 struct constraint_expr *rhsp;
5436 tree arg = gimple_call_arg (t, i);
5437
5438 if (TREE_CODE (arg) == SSA_NAME
5439 || is_gimple_min_invariant (arg))
5440 continue;
5441
5442 get_constraint_for_address_of (arg, &rhsc);
5443 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5444 process_constraint (new_constraint (lhs, *rhsp));
5445 rhsc.truncate (0);
5446 }
5447
5448 /* Build constraints for propagating clobbers/uses along the
5449 callgraph edges. */
5450 cfi = get_fi_for_callee (call_stmt);
5451 if (cfi->id == anything_id)
5452 {
5453 if (gimple_vdef (t))
5454 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5455 anything_id);
5456 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5457 anything_id);
5458 return;
5459 }
5460
5461 /* For callees without function info (that's external functions),
5462 ESCAPED is clobbered and used. */
5463 if (cfi->decl
5464 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5465 && !cfi->is_fn_info)
5466 {
5467 varinfo_t vi;
5468
5469 if (gimple_vdef (t))
5470 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5471 escaped_id);
5472 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5473
5474 /* Also honor the call statement use/clobber info. */
5475 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5476 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5477 vi->id);
5478 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5479 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5480 vi->id);
5481 return;
5482 }
5483
5484 /* Otherwise the caller clobbers and uses what the callee does.
5485 ??? This should use a new complex constraint that filters
5486 local variables of the callee. */
5487 if (gimple_vdef (t))
5488 {
5489 lhs = get_function_part_constraint (fi, fi_clobbers);
5490 rhs = get_function_part_constraint (cfi, fi_clobbers);
5491 process_constraint (new_constraint (lhs, rhs));
5492 }
5493 lhs = get_function_part_constraint (fi, fi_uses);
5494 rhs = get_function_part_constraint (cfi, fi_uses);
5495 process_constraint (new_constraint (lhs, rhs));
5496 }
5497 else if (gimple_code (t) == GIMPLE_ASM)
5498 {
5499 /* ??? Ick. We can do better. */
5500 if (gimple_vdef (t))
5501 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5502 anything_id);
5503 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5504 anything_id);
5505 }
5506 }
5507
5508
5509 /* Find the first varinfo in the same variable as START that overlaps with
5510 OFFSET. Return NULL if we can't find one. */
5511
5512 static varinfo_t
5513 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5514 {
5515 /* If the offset is outside of the variable, bail out. */
5516 if (offset >= start->fullsize)
5517 return NULL;
5518
5519 /* If we cannot reach offset from start, lookup the first field
5520 and start from there. */
5521 if (start->offset > offset)
5522 start = get_varinfo (start->head);
5523
5524 while (start)
5525 {
5526 /* We may not find a variable in the field list with the actual
5527 offset when we have glommed a structure to a variable.
5528 In that case, however, offset should still be within the size
5529 of the variable. */
5530 if (offset >= start->offset
5531 && (offset - start->offset) < start->size)
5532 return start;
5533
5534 start = vi_next (start);
5535 }
5536
5537 return NULL;
5538 }
5539
5540 /* Find the first varinfo in the same variable as START that overlaps with
5541 OFFSET. If there is no such varinfo the varinfo directly preceding
5542 OFFSET is returned. */
5543
5544 static varinfo_t
5545 first_or_preceding_vi_for_offset (varinfo_t start,
5546 unsigned HOST_WIDE_INT offset)
5547 {
5548 /* If we cannot reach offset from start, lookup the first field
5549 and start from there. */
5550 if (start->offset > offset)
5551 start = get_varinfo (start->head);
5552
5553 /* We may not find a variable in the field list with the actual
5554 offset when we have glommed a structure to a variable.
5555 In that case, however, offset should still be within the size
5556 of the variable.
5557 If we got beyond the offset we look for return the field
5558 directly preceding offset which may be the last field. */
5559 while (start->next
5560 && offset >= start->offset
5561 && !((offset - start->offset) < start->size))
5562 start = vi_next (start);
5563
5564 return start;
5565 }
5566
5567
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True when DECL_SIZE of the field is absent or does not fit an
     unsigned HOST_WIDE_INT (then SIZE above is meaningless).  */
  unsigned has_unknown_size : 1;

  /* True when the field's type is known to contain pointers
     (see type_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True when the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True when the field is a restrict-qualified pointer of known size.  */
  unsigned only_restrict_pointers : 1;

  /* For restrict-qualified pointer fields, the pointed-to type;
     NULL_TREE otherwise.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5592
5593
5594 /* qsort comparison function for two fieldoff's PA and PB */
5595
5596 static int
5597 fieldoff_compare (const void *pa, const void *pb)
5598 {
5599 const fieldoff_s *foa = (const fieldoff_s *)pa;
5600 const fieldoff_s *fob = (const fieldoff_s *)pb;
5601 unsigned HOST_WIDE_INT foasize, fobsize;
5602
5603 if (foa->offset < fob->offset)
5604 return -1;
5605 else if (foa->offset > fob->offset)
5606 return 1;
5607
5608 foasize = foa->size;
5609 fobsize = fob->size;
5610 if (foasize < fobsize)
5611 return -1;
5612 else if (foasize > fobsize)
5613 return 1;
5614 return 0;
5615 }
5616
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  /* fieldoff_compare orders by offset first, then by size.  */
  fieldstack.qsort (fieldoff_compare);
}
5623
5624 /* Return true if T is a type that can have subvars. */
5625
5626 static inline bool
5627 type_can_have_subvars (const_tree t)
5628 {
5629 /* Aggregates without overlapping fields can have subvars. */
5630 return TREE_CODE (t) == RECORD_TYPE;
5631 }
5632
5633 /* Return true if V is a tree that we can have subvars for.
5634 Normally, this is any aggregate type. Also complex
5635 types which are not gimple registers can have subvars. */
5636
5637 static inline bool
5638 var_can_have_subvars (const_tree v)
5639 {
5640 /* Volatile variables should never have subvars. */
5641 if (TREE_THIS_VOLATILE (v))
5642 return false;
5643
5644 /* Non decls or memory tags can never have subvars. */
5645 if (!DECL_P (v))
5646 return false;
5647
5648 return type_can_have_subvars (TREE_TYPE (v));
5649 }
5650
5651 /* Return true if T is a type that does contain pointers. */
5652
5653 static bool
5654 type_must_have_pointers (tree type)
5655 {
5656 if (POINTER_TYPE_P (type))
5657 return true;
5658
5659 if (TREE_CODE (type) == ARRAY_TYPE)
5660 return type_must_have_pointers (TREE_TYPE (type));
5661
5662 /* A function or method can have pointers as arguments, so track
5663 those separately. */
5664 if (TREE_CODE (type) == FUNCTION_TYPE
5665 || TREE_CODE (type) == METHOD_TYPE)
5666 return true;
5667
5668 return false;
5669 }
5670
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5676
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs can have subvars at all.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= param_max_fields_for_field_sensitive, make
     sure this fails.  */
  if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);
	tree field_type = TREE_TYPE (field);

	/* Fields we cannot decompose (no subvars, or union-like types)
	   are pushed as a single entry.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (field_type, fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    /* PAIR points at the previously pushed entry, the candidate
	       for merging with this field.  */
	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		/* A padding entry covering [0, offset + foff) with
		   may_have_pointers set.  */
		fieldoff_s e
		  = {0, offset + foff, false, false, true, false, NULL_TREE};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Extend the previous entry to also cover this field.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		/* Otherwise record this field as a new entry.  */
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (e);
	      }
	  }

	/* We saw at least one FIELD_DECL, handled here or by the
	   recursion above.  */
	empty_p = false;
      }

  return !empty_p;
}
5781
5782 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5783 if it is a varargs function. */
5784
5785 static unsigned int
5786 count_num_arguments (tree decl, bool *is_varargs)
5787 {
5788 unsigned int num = 0;
5789 tree t;
5790
5791 /* Capture named arguments for K&R functions. They do not
5792 have a prototype and thus no TYPE_ARG_TYPES. */
5793 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5794 ++num;
5795
5796 /* Check if the function has variadic arguments. */
5797 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5798 if (TREE_VALUE (t) == void_type_node)
5799 break;
5800 if (!t)
5801 *is_varargs = true;
5802
5803 return num;
5804 }
5805
/* Create the function info (varinfo chain) for DECL, using NAME, and
   return the varinfo we've created for the function.  The chain starts
   with the function itself and is followed by sub-variables at the
   fi_clobbers, fi_uses, fi_static_chain, fi_result and fi_parm_base
   offsets (see get_function_part_constraint).  If NONLOCAL_P, create
   initial constraints from NONLOCAL for pointer-containing parts.  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* For varargs the number of "fields" is unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  /* PREV_VI tracks the chain tail; sub-variables are appended in
     increasing offset order (asserted below).  */
  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname, false);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      clobbervi->is_reg_var = true;

      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      tempname = xasprintf ("%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname, false);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      usevi->is_reg_var = true;

      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A DECL_BY_REFERENCE result is a hidden pointer parameter, so it
	 points to NONLOCAL like the other incoming parameters.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG may run out before NUM_ARGS for K&R-style declarations;
	 fall back to associating the slot with the function decl.  */
      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
    }

  return vi;
}
6023
6024
6025 /* Return true if FIELDSTACK contains fields that overlap.
6026 FIELDSTACK is assumed to be sorted by offset. */
6027
6028 static bool
6029 check_for_overlaps (vec<fieldoff_s> fieldstack)
6030 {
6031 fieldoff_s *fo = NULL;
6032 unsigned int i;
6033 HOST_WIDE_INT lastoffset = -1;
6034
6035 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6036 {
6037 if (fo->offset == lastoffset)
6038 return true;
6039 lastoffset = fo->offset;
6040 }
6041 return false;
6042 }
6043
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
   HANDLED_STRUCT_TYPE is used to register struct types reached by following
   restrict pointers.  This is needed to prevent infinite recursion.
   If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
   does not advertise it.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type,
			    bool add_restrict = false)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single conservative
     varinfo covering everything.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Fields with unknown size or negative offset defeat
	 field-sensitivity.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && (TYPE_RESTRICT (decl_type) || add_restrict))
	vi->only_restrict_pointers = 1;
      /* For a restrict-qualified pointer parameter create a fake heap
	 variable it may point to.  HANDLED_STRUCT_TYPE guards against
	 unbounded recursion through self-referential struct types.  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per collected field, chained via
     next/head.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      if (dump_file)
	{
	  if (fieldstack.length () != 1)
	    {
	      /* Name sub-fields "name.offset+size" for dumps.  */
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* As above, restrict-qualified pointer fields of parameters get a
	 fake heap variable, guarded against recursion.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
6222
/* Create varinfo(s) for DECL named NAME, register them in the tree ->
   varinfo map and generate initial constraints for globals.  Returns
   the id of the (head) varinfo created for DECL.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, fi_result);
      fi = new_var_info (NULL_TREE, "ifuncres", true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (new_constraint (lhs, rhs));
      insert_vi_for_tree (decl, fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  Iterate over all
     sub-variables (fields) of DECL.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
6322
6323 /* Print out the points-to solution for VAR to FILE. */
6324
6325 static void
6326 dump_solution_for_var (FILE *file, unsigned int var)
6327 {
6328 varinfo_t vi = get_varinfo (var);
6329 unsigned int i;
6330 bitmap_iterator bi;
6331
6332 /* Dump the solution for unified vars anyway, this avoids difficulties
6333 in scanning dumps in the testsuite. */
6334 fprintf (file, "%s = { ", vi->name);
6335 vi = get_varinfo (find (var));
6336 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6337 fprintf (file, "%s ", get_varinfo (i)->name);
6338 fprintf (file, "}");
6339
6340 /* But note when the variable was unified. */
6341 if (vi->id != var)
6342 fprintf (file, " same as %s", vi->name);
6343
6344 fprintf (file, "\n");
6345 }
6346
/* Print the points-to solution for VAR to stderr.  Convenience wrapper
   around dump_solution_for_var intended to be called from a debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
6354
6355 /* Register the constraints for function parameter related VI. */
6356
6357 static void
6358 make_param_constraints (varinfo_t vi)
6359 {
6360 for (; vi; vi = vi_next (vi))
6361 {
6362 if (vi->only_restrict_pointers)
6363 ;
6364 else if (vi->may_have_pointers)
6365 make_constraint_from (vi, nonlocal_id);
6366
6367 if (vi->is_full_var)
6368 break;
6369 }
6370 }
6371
6372 /* Create varinfo structures for all of the variables in the
6373 function for intraprocedural mode. */
6374
6375 static void
6376 intra_create_variable_infos (struct function *fn)
6377 {
6378 tree t;
6379 bitmap handled_struct_type = NULL;
6380 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6381
6382 /* For each incoming pointer argument arg, create the constraint ARG
6383 = NONLOCAL or a dummy variable if it is a restrict qualified
6384 passed-by-reference argument. */
6385 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6386 {
6387 if (handled_struct_type == NULL)
6388 handled_struct_type = BITMAP_ALLOC (NULL);
6389
6390 varinfo_t p
6391 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6392 handled_struct_type, this_parm_in_ctor);
6393 insert_vi_for_tree (t, p);
6394
6395 make_param_constraints (p);
6396
6397 this_parm_in_ctor = false;
6398 }
6399
6400 if (handled_struct_type != NULL)
6401 BITMAP_FREE (handled_struct_type);
6402
6403 /* Add a constraint for a result decl that is passed by reference. */
6404 if (DECL_RESULT (fn->decl)
6405 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6406 {
6407 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6408
6409 for (p = result_vi; p; p = vi_next (p))
6410 make_constraint_from (p, nonlocal_id);
6411 }
6412
6413 /* Add a constraint for the incoming static chain parameter. */
6414 if (fn->static_chain_decl != NULL_TREE)
6415 {
6416 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6417
6418 for (p = chain_vi; p; p = vi_next (p))
6419 make_constraint_from (p, nonlocal_id);
6420 }
6421 }
6422
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set. */

typedef struct shared_bitmap_info
{
  /* The shared points-to bitmap.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS (computed with bitmap_hash), so lookups do
     not have to rehash the bitmap.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Shared_bitmap hashtable helpers. */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};

/* Hash function for a shared_bitmap_info_t.  Returns the cached hash
   value rather than recomputing it.  */

inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  Two entries are
   equal when their bitmaps have identical contents.  */

inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}

/* Shared_bitmap hashtable.  Maps points-to bitmap contents to a single
   shared instance; see shared_bitmap_lookup / shared_bitmap_add.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6462
6463 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6464 existing instance if there is one, NULL otherwise. */
6465
6466 static bitmap
6467 shared_bitmap_lookup (bitmap pt_vars)
6468 {
6469 shared_bitmap_info **slot;
6470 struct shared_bitmap_info sbi;
6471
6472 sbi.pt_vars = pt_vars;
6473 sbi.hashcode = bitmap_hash (pt_vars);
6474
6475 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6476 if (!slot)
6477 return NULL;
6478 else
6479 return (*slot)->pt_vars;
6480 }
6481
6482
6483 /* Add a bitmap to the shared bitmap hashtable. */
6484
6485 static void
6486 shared_bitmap_add (bitmap pt_vars)
6487 {
6488 shared_bitmap_info **slot;
6489 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6490
6491 sbi->pt_vars = pt_vars;
6492 sbi->hashcode = bitmap_hash (pt_vars);
6493
6494 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6495 gcc_assert (!*slot);
6496 *slot = sbi;
6497 }
6498
6499
/* Set bits in INTO corresponding to the variable uids in solution set FROM.
   While walking FROM also compute the pt_solution flags of *PT that
   depend on the set members (vars_contains_escaped, vars_contains_restrict,
   vars_contains_nonlocal, vars_contains_interposable).  FNDECL, when
   non-NULL in IPA mode, is used to classify non-automatic variables as
   nonlocal.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  /* If ANYTHING is in the ESCAPED solution, every member of FROM is
     considered escaped.  */
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* Artificial vars (ESCAPED, NONLOCAL, ...) are represented by
	 flags on *PT elsewhere, not by bits in the vars bitmap.  */
      if (vi->is_artificial_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid. */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables. */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope. */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification. */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable. */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128. */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6587
6588
/* Compute the points-to solution *PT for the variable VI (ORIG_VI),
   for the function FNDECL (may be NULL).  The result is cached in
   FINAL_SOLUTIONS, keyed on the representative varinfo, so repeated
   queries for unified variables are answered from the cache.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable. */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it. */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  /* Allocate the solution on the obstack and record it in the cache
     before filling it in.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes. */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      /* NB: this inner VI intentionally shadows the outer one; it is
	 the solution-set member being classified.  */
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here. */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities. */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers. */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible. */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical set already exists; reuse it and release ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6669
6670 /* Given a pointer variable P, fill in its points-to set. */
6671
6672 static void
6673 find_what_p_points_to (tree fndecl, tree p)
6674 {
6675 struct ptr_info_def *pi;
6676 tree lookup_p = p;
6677 varinfo_t vi;
6678 bool nonnull = get_ptr_nonnull (p);
6679
6680 /* For parameters, get at the points-to set for the actual parm
6681 decl. */
6682 if (TREE_CODE (p) == SSA_NAME
6683 && SSA_NAME_IS_DEFAULT_DEF (p)
6684 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6685 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6686 lookup_p = SSA_NAME_VAR (p);
6687
6688 vi = lookup_vi_for_tree (lookup_p);
6689 if (!vi)
6690 return;
6691
6692 pi = get_ptr_info (p);
6693 pi->pt = find_what_var_points_to (fndecl, vi);
6694 /* Conservatively set to NULL from PTA (to true). */
6695 pi->pt.null = 1;
6696 /* Preserve pointer nonnull computed by VRP. See get_ptr_nonnull
6697 in gcc/tree-ssaname.c for more information. */
6698 if (nonnull)
6699 set_ptr_nonnull (p);
6700 }
6701
6702
/* Query statistics for points-to solutions.  Incremented by the
   pt_solution_includes and pt_solutions_intersect wrappers below and
   dumped by dump_pta_stats.  */

static struct {
  /* Counts of "may alias" vs. "no alias" answers per query kind.  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6711
6712 void
6713 dump_pta_stats (FILE *s)
6714 {
6715 fprintf (s, "\nPTA query stats:\n");
6716 fprintf (s, " pt_solution_includes: "
6717 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6718 HOST_WIDE_INT_PRINT_DEC" queries\n",
6719 pta_stats.pt_solution_includes_no_alias,
6720 pta_stats.pt_solution_includes_no_alias
6721 + pta_stats.pt_solution_includes_may_alias);
6722 fprintf (s, " pt_solutions_intersect: "
6723 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6724 HOST_WIDE_INT_PRINT_DEC" queries\n",
6725 pta_stats.pt_solutions_intersect_no_alias,
6726 pta_stats.pt_solutions_intersect_no_alias
6727 + pta_stats.pt_solutions_intersect_may_alias);
6728 }
6729
6730
6731 /* Reset the points-to solution *PT to a conservative default
6732 (point to anything). */
6733
6734 void
6735 pt_solution_reset (struct pt_solution *pt)
6736 {
6737 memset (pt, 0, sizeof (struct pt_solution));
6738 pt->anything = true;
6739 pt->null = true;
6740 }
6741
6742 /* Set the points-to solution *PT to point only to the variables
6743 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6744 global variables and VARS_CONTAINS_RESTRICT specifies whether
6745 it contains restrict tag variables. */
6746
6747 void
6748 pt_solution_set (struct pt_solution *pt, bitmap vars,
6749 bool vars_contains_nonlocal)
6750 {
6751 memset (pt, 0, sizeof (struct pt_solution));
6752 pt->vars = vars;
6753 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6754 pt->vars_contains_escaped
6755 = (cfun->gimple_df->escaped.anything
6756 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6757 }
6758
6759 /* Set the points-to solution *PT to point only to the variable VAR. */
6760
6761 void
6762 pt_solution_set_var (struct pt_solution *pt, tree var)
6763 {
6764 memset (pt, 0, sizeof (struct pt_solution));
6765 pt->vars = BITMAP_GGC_ALLOC ();
6766 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6767 pt->vars_contains_nonlocal = is_global_var (var);
6768 pt->vars_contains_escaped
6769 = (cfun->gimple_df->escaped.anything
6770 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6771 }
6772
6773 /* Computes the union of the points-to solutions *DEST and *SRC and
6774 stores the result in *DEST. This changes the points-to bitmap
6775 of *DEST and thus may not be used if that might be shared.
6776 The points-to bitmap of *SRC and *DEST will not be shared after
6777 this function if they were not before. */
6778
6779 static void
6780 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6781 {
6782 dest->anything |= src->anything;
6783 if (dest->anything)
6784 {
6785 pt_solution_reset (dest);
6786 return;
6787 }
6788
6789 dest->nonlocal |= src->nonlocal;
6790 dest->escaped |= src->escaped;
6791 dest->ipa_escaped |= src->ipa_escaped;
6792 dest->null |= src->null;
6793 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6794 dest->vars_contains_escaped |= src->vars_contains_escaped;
6795 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6796 if (!src->vars)
6797 return;
6798
6799 if (!dest->vars)
6800 dest->vars = BITMAP_GGC_ALLOC ();
6801 bitmap_ior_into (dest->vars, src->vars);
6802 }
6803
6804 /* Return true if the points-to solution *PT is empty. */
6805
6806 bool
6807 pt_solution_empty_p (const pt_solution *pt)
6808 {
6809 if (pt->anything
6810 || pt->nonlocal)
6811 return false;
6812
6813 if (pt->vars
6814 && !bitmap_empty_p (pt->vars))
6815 return false;
6816
6817 /* If the solution includes ESCAPED, check if that is empty. */
6818 if (pt->escaped
6819 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6820 return false;
6821
6822 /* If the solution includes ESCAPED, check if that is empty. */
6823 if (pt->ipa_escaped
6824 && !pt_solution_empty_p (&ipa_escaped_pt))
6825 return false;
6826
6827 return true;
6828 }
6829
6830 /* Return true if the points-to solution *PT only point to a single var, and
6831 return the var uid in *UID. */
6832
6833 bool
6834 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6835 {
6836 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6837 || pt->vars == NULL
6838 || !bitmap_single_bit_set_p (pt->vars))
6839 return false;
6840
6841 *uid = bitmap_first_set_bit (pt->vars);
6842 return true;
6843 }
6844
6845 /* Return true if the points-to solution *PT includes global memory. */
6846
6847 bool
6848 pt_solution_includes_global (struct pt_solution *pt)
6849 {
6850 if (pt->anything
6851 || pt->nonlocal
6852 || pt->vars_contains_nonlocal
6853 /* The following is a hack to make the malloc escape hack work.
6854 In reality we'd need different sets for escaped-through-return
6855 and escaped-to-callees and passes would need to be updated. */
6856 || pt->vars_contains_escaped_heap)
6857 return true;
6858
6859 /* 'escaped' is also a placeholder so we have to look into it. */
6860 if (pt->escaped)
6861 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6862
6863 if (pt->ipa_escaped)
6864 return pt_solution_includes_global (&ipa_escaped_pt);
6865
6866 return false;
6867 }
6868
6869 /* Return true if the points-to solution *PT includes the variable
6870 declaration DECL. */
6871
6872 static bool
6873 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6874 {
6875 if (pt->anything)
6876 return true;
6877
6878 if (pt->nonlocal
6879 && is_global_var (decl))
6880 return true;
6881
6882 if (pt->vars
6883 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6884 return true;
6885
6886 /* If the solution includes ESCAPED, check it. */
6887 if (pt->escaped
6888 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6889 return true;
6890
6891 /* If the solution includes ESCAPED, check it. */
6892 if (pt->ipa_escaped
6893 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6894 return true;
6895
6896 return false;
6897 }
6898
6899 bool
6900 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6901 {
6902 bool res = pt_solution_includes_1 (pt, decl);
6903 if (res)
6904 ++pta_stats.pt_solution_includes_may_alias;
6905 else
6906 ++pta_stats.pt_solution_includes_no_alias;
6907 return res;
6908 }
6909
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection, i.e. the pointers they describe may alias.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  /* ANYTHING intersects with everything.  */
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias. */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias. */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets? */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias. */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other. */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects. */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6961
6962 bool
6963 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6964 {
6965 bool res = pt_solutions_intersect_1 (pt1, pt2);
6966 if (res)
6967 ++pta_stats.pt_solutions_intersect_may_alias;
6968 else
6969 ++pta_stats.pt_solutions_intersect_no_alias;
6970 return res;
6971 }
6972
6973
6974 /* Dump points-to information to OUTFILE. */
6975
6976 static void
6977 dump_sa_points_to_info (FILE *outfile)
6978 {
6979 unsigned int i;
6980
6981 fprintf (outfile, "\nPoints-to sets\n\n");
6982
6983 if (dump_flags & TDF_STATS)
6984 {
6985 fprintf (outfile, "Stats:\n");
6986 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6987 fprintf (outfile, "Non-pointer vars: %d\n",
6988 stats.nonpointer_vars);
6989 fprintf (outfile, "Statically unified vars: %d\n",
6990 stats.unified_vars_static);
6991 fprintf (outfile, "Dynamically unified vars: %d\n",
6992 stats.unified_vars_dynamic);
6993 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6994 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6995 fprintf (outfile, "Number of implicit edges: %d\n",
6996 stats.num_implicit_edges);
6997 }
6998
6999 for (i = 1; i < varmap.length (); i++)
7000 {
7001 varinfo_t vi = get_varinfo (i);
7002 if (!vi->may_have_pointers)
7003 continue;
7004 dump_solution_for_var (outfile, i);
7005 }
7006 }
7007
7008
/* Debug points-to information to stderr.  Convenience wrapper around
   dump_sa_points_to_info for use from a debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
7016
7017
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together with
   the constraints that relate them.  The creation order must match the
   corresponding *_id constants; the gcc_asserts below verify this.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL. */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL. */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory. */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING. */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant. */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything. */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory. */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  /* Unlike the others, ESCAPED is not special: it participates in
     solving like an ordinary variable.  */
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory. */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes. */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to. */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory. */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING. */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to". */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to. */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
7179
/* Initialize things necessary to perform PTA: obstacks, the constraint
   and variable maps, the tree-to-varinfo hash maps, the shared-bitmap
   table, the base constraint variables and the final-solution cache.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity is only worthwhile if more than one field
     may be distinguished.  */
  use_field_sensitive = (param_max_fields_for_field_sensitive > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7205
7206 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7207 predecessor edges. */
7208
7209 static void
7210 remove_preds_and_fake_succs (constraint_graph_t graph)
7211 {
7212 unsigned int i;
7213
7214 /* Clear the implicit ref and address nodes from the successor
7215 lists. */
7216 for (i = 1; i < FIRST_REF_NODE; i++)
7217 {
7218 if (graph->succs[i])
7219 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7220 FIRST_REF_NODE * 2);
7221 }
7222
7223 /* Free the successor list for the non-ref nodes. */
7224 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7225 {
7226 if (graph->succs[i])
7227 BITMAP_FREE (graph->succs[i]);
7228 }
7229
7230 /* Now reallocate the size of the successor list as, and blow away
7231 the predecessor bitmaps. */
7232 graph->size = varmap.length ();
7233 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7234
7235 free (graph->implicit_preds);
7236 graph->implicit_preds = NULL;
7237 free (graph->preds);
7238 graph->preds = NULL;
7239 bitmap_obstack_release (&predbitmap_obstack);
7240 }
7241
/* Solve the constraint set: reorder varinfos for bitmap efficiency,
   build the predecessor graph, perform variable substitution, then
   iterate the solver over the successor graph.  */

static void
solve_constraints (void)
{
  class scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient. */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special vars up to INTEGER_ID keep their fixed positions.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with non-register vars (as possibly address-taken), followed
     by register vars as conservative set of vars never appearing in
     the points-to solution bitmaps. */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[i]->is_reg_var)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[i]->is_reg_var)
      map[i] = j++;
  /* Shuffle varmap according to map.  This applies the permutation
     in place by following swap cycles; NEXT and HEAD links are
     remapped through MAP as well.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints. */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: one node per variable plus one REF node
     each for the predecessor graph.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes. */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point. */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }
}
7344
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  /* Initialize the per-function alias/points-to data structures.  */
  init_alias_vars ();

  /* Build variable infos for the current function's variables
     (intraprocedural mode).  */
  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHIs of virtual operands carry no pointer information, so
	 only real-operand PHIs generate constraints.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
      {
	tree val = gimple_return_retval (ret);
	/* ??? Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;
	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (find (escaped_id));
	varinfo_t vi = lookup_vi_for_tree (val);
	bitmap delta = BITMAP_ALLOC (&pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	/* Seed DELTA with the global/heap vars the returned value may
	   point to that are not already in the ESCAPED solution.  */
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (find (vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure.  Iterate the worklist
	   until no new global/heap vars are reached.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
	while (!bitmap_empty_p (delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (delta, new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  /* First the use set: const calls use nothing.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Then the clobber set: pure/novops calls clobber nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
7536
7537
/* Delete created points-to sets.  Releases all global solver state
   allocated by init_alias_vars / solve_constraints.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Free the constraint graph, including the per-node complex
     constraint vectors, before freeing the graph itself.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
7576
/* Data passed to visit_loadstore via walk_stmt_load_store_ops.  */

struct vls_data
{
  /* The clique to assign to accesses not based on a restrict pointer.  */
  unsigned short clique;
  /* Whether some restrict variable is in the ESCAPED solution.  */
  bool escaped_p;
  /* The set of restrict pointed-to variables, including subvars.  */
  bitmap rvars;
};
7583
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  Always returns false so the walk continues.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip handled components to reach the base decl, then replace
	 it in-place with a MEM_REF carrying { clique, 0 }.  */
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
7648
/* Data passed to maybe_set_dependence_info via walk_stmt_load_store_ops.  */

struct msdi_data {
  /* The restrict pointer whose dereferences are being processed.  */
  tree ptr;
  /* The clique in use; allocated lazily on first assignment.  */
  unsigned short *clique;
  /* The last restrict-UID handed out; bumped when RESTRICT_VAR gets one.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR points to.  */
  varinfo_t restrict_var;
};
7655
/* If BASE is a MEM_REF then assign a clique, base pair to it, updating
   CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
   Return whether dependence info was assigned to BASE.  Callback for
   walk_stmt_load_store_ops; DATA is a msdi_data.  */

static bool
maybe_set_dependence_info (gimple *, tree base, tree, void *data)
{
  tree ptr = ((msdi_data *)data)->ptr;
  unsigned short &clique = *((msdi_data *)data)->clique;
  unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
  varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_OPERAND (base, 0) == ptr)
    {
      /* Do not overwrite existing cliques.  This avoids overwriting dependence
	 info inlined from a function with restrict parameters inlined
	 into a function with restrict parameters.  This usually means we
	 prefer to be precise in innermost loops.  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  /* Allocate the clique lazily, on the first base we actually
	     mark.  */
	  if (clique == 0)
	    {
	      if (cfun->last_clique == 0)
		cfun->last_clique = 1;
	      clique = 1;
	    }
	  /* Likewise hand RESTRICT_VAR a restrict-UID on first use.  */
	  if (restrict_var->ruid == 0)
	    restrict_var->ruid = ++last_ruid;
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
	  return true;
	}
    }
  return false;
}
7692
7693 /* Clear dependence info for the clique DATA. */
7694
7695 static bool
7696 clear_dependence_clique (gimple *, tree base, tree, void *data)
7697 {
7698 unsigned short clique = (uintptr_t)data;
7699 if ((TREE_CODE (base) == MEM_REF
7700 || TREE_CODE (base) == TARGET_MEM_REF)
7701 && MR_DEPENDENCE_CLIQUE (base) == clique)
7702 {
7703 MR_DEPENDENCE_CLIQUE (base) = 0;
7704 MR_DEPENDENCE_BASE (base) = 0;
7705 }
7706
7707 return false;
7708 }
7709
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      /* For default defs of parameters the points-to info lives on the
	 parm/result decl, not the SSA name.  */
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* Scan the solution for exactly one restrict variable; anything
	 else besides NULL disqualifies the pointer.  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
7833
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Returns 0 (no extra TODO flags for the pass manager).  */

unsigned int
compute_may_aliases (void)
{
  /* If IPA points-to information is already available, do not destroy
     it by recomputing a weaker local solution.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
7875
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass body does nothing; all the work
   happens through the finish TODO flag.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when points-to analysis is enabled.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace
7907
7908 gimple_opt_pass *
7909 make_pass_build_alias (gcc::context *ctxt)
7910 {
7911 return new pass_build_alias (ctxt);
7912 }
7913
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Early variant of the "alias" pass above;
   likewise the work happens through the finish TODO flag.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when points-to analysis is enabled.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace
7945
7946 gimple_opt_pass *
7947 make_pass_build_ealias (gcc::context *ctxt)
7948 {
7949 return new pass_build_ealias (ctxt);
7950 }
7951
7952
/* IPA PTA solution for ESCAPED.  Positionally initialized with only the
   first flag set (presumably pt_solution's 'anything' flag — confirm
   against the pt_solution declaration) and a NULL variables bitmap;
   the real solution is computed by ipa_pta_execute.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };
7957
7958 /* Associate node with varinfo DATA. Worker for
7959 cgraph_for_symbol_thunks_and_aliases. */
7960 static bool
7961 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7962 {
7963 if ((node->alias
7964 || (node->thunk.thunk_p
7965 && ! node->inlined_to))
7966 && node->analyzed
7967 && !node->ifunc_resolver)
7968 insert_vi_for_tree (node->decl, (varinfo_t)data);
7969 return false;
7970 }
7971
/* Dump varinfo VI to FILE.  Prints id, name, set flags, layout fields
   and the (old)solution bitmaps.  A NULL VI is silently ignored.  */

static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  fprintf (file, "%u: %s\n", vi->id, vi->name);

  /* Print each set flag, space-separated.  */
  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  /* Size/fullsize of all-ones appear to mean "unknown"; skip those.  */
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  /* Only print the old solution when it differs from the current one.  */
  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
8042
/* Dump varinfo VI to stderr.  Convenience wrapper meant to be called
   from the debugger.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8050
8051 /* Dump varmap to FILE. */
8052
8053 static void
8054 dump_varmap (FILE *file)
8055 {
8056 if (varmap.length () == 0)
8057 return;
8058
8059 fprintf (file, "variables:\n");
8060
8061 for (unsigned int i = 0; i < varmap.length (); ++i)
8062 {
8063 varinfo_t vi = get_varinfo (i);
8064 dump_varinfo (file, vi);
8065 }
8066
8067 fprintf (file, "\n");
8068 }
8069
/* Dump varmap to stderr.  Convenience wrapper meant to be called from
   the debugger.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8077
8078 /* Compute whether node is refered to non-locally. Worker for
8079 cgraph_for_symbol_thunks_and_aliases. */
8080 static bool
8081 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8082 {
8083 bool *nonlocal_p = (bool *)data;
8084 *nonlocal_p |= (node->used_from_other_partition
8085 || node->externally_visible
8086 || node->force_output
8087 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8088 return false;
8089 }
8090
8091 /* Same for varpool nodes. */
8092 static bool
8093 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8094 {
8095 bool *nonlocal_p = (bool *)data;
8096 *nonlocal_p |= (node->used_from_other_partition
8097 || node->externally_visible
8098 || node->force_output);
8099 return false;
8100 }
8101
8102 /* Execute the driver for IPA PTA. */
8103 static unsigned int
8104 ipa_pta_execute (void)
8105 {
8106 struct cgraph_node *node;
8107 varpool_node *var;
8108 unsigned int from = 0;
8109
8110 in_ipa_mode = 1;
8111
8112 init_alias_vars ();
8113
8114 if (dump_file && (dump_flags & TDF_DETAILS))
8115 {
8116 symtab->dump (dump_file);
8117 fprintf (dump_file, "\n");
8118 }
8119
8120 if (dump_file)
8121 {
8122 fprintf (dump_file, "Generating generic constraints\n\n");
8123 dump_constraints (dump_file, from);
8124 fprintf (dump_file, "\n");
8125 from = constraints.length ();
8126 }
8127
8128 /* Build the constraints. */
8129 FOR_EACH_DEFINED_FUNCTION (node)
8130 {
8131 varinfo_t vi;
8132 /* Nodes without a body are not interesting. Especially do not
8133 visit clones at this point for now - we get duplicate decls
8134 there for inline clones at least. */
8135 if (!node->has_gimple_body_p () || node->inlined_to)
8136 continue;
8137 node->get_body ();
8138
8139 gcc_assert (!node->clone_of);
8140
8141 /* For externally visible or attribute used annotated functions use
8142 local constraints for their arguments.
8143 For local functions we see all callers and thus do not need initial
8144 constraints for parameters. */
8145 bool nonlocal_p = (node->used_from_other_partition
8146 || node->externally_visible
8147 || node->force_output
8148 || lookup_attribute ("noipa",
8149 DECL_ATTRIBUTES (node->decl)));
8150 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8151 &nonlocal_p, true);
8152
8153 vi = create_function_info_for (node->decl,
8154 alias_get_name (node->decl), false,
8155 nonlocal_p);
8156 if (dump_file
8157 && from != constraints.length ())
8158 {
8159 fprintf (dump_file,
8160 "Generating intial constraints for %s", node->name ());
8161 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8162 fprintf (dump_file, " (%s)",
8163 IDENTIFIER_POINTER
8164 (DECL_ASSEMBLER_NAME (node->decl)));
8165 fprintf (dump_file, "\n\n");
8166 dump_constraints (dump_file, from);
8167 fprintf (dump_file, "\n");
8168
8169 from = constraints.length ();
8170 }
8171
8172 node->call_for_symbol_thunks_and_aliases
8173 (associate_varinfo_to_alias, vi, true);
8174 }
8175
8176 /* Create constraints for global variables and their initializers. */
8177 FOR_EACH_VARIABLE (var)
8178 {
8179 if (var->alias && var->analyzed)
8180 continue;
8181
8182 varinfo_t vi = get_vi_for_tree (var->decl);
8183
8184 /* For the purpose of IPA PTA unit-local globals are not
8185 escape points. */
8186 bool nonlocal_p = (var->used_from_other_partition
8187 || var->externally_visible
8188 || var->force_output);
8189 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8190 &nonlocal_p, true);
8191 if (nonlocal_p)
8192 vi->is_ipa_escape_point = true;
8193 }
8194
8195 if (dump_file
8196 && from != constraints.length ())
8197 {
8198 fprintf (dump_file,
8199 "Generating constraints for global initializers\n\n");
8200 dump_constraints (dump_file, from);
8201 fprintf (dump_file, "\n");
8202 from = constraints.length ();
8203 }
8204
8205 FOR_EACH_DEFINED_FUNCTION (node)
8206 {
8207 struct function *func;
8208 basic_block bb;
8209
8210 /* Nodes without a body are not interesting. */
8211 if (!node->has_gimple_body_p () || node->clone_of)
8212 continue;
8213
8214 if (dump_file)
8215 {
8216 fprintf (dump_file,
8217 "Generating constraints for %s", node->name ());
8218 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8219 fprintf (dump_file, " (%s)",
8220 IDENTIFIER_POINTER
8221 (DECL_ASSEMBLER_NAME (node->decl)));
8222 fprintf (dump_file, "\n");
8223 }
8224
8225 func = DECL_STRUCT_FUNCTION (node->decl);
8226 gcc_assert (cfun == NULL);
8227
8228 /* Build constriants for the function body. */
8229 FOR_EACH_BB_FN (bb, func)
8230 {
8231 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8232 gsi_next (&gsi))
8233 {
8234 gphi *phi = gsi.phi ();
8235
8236 if (! virtual_operand_p (gimple_phi_result (phi)))
8237 find_func_aliases (func, phi);
8238 }
8239
8240 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8241 gsi_next (&gsi))
8242 {
8243 gimple *stmt = gsi_stmt (gsi);
8244
8245 find_func_aliases (func, stmt);
8246 find_func_clobbers (func, stmt);
8247 }
8248 }
8249
8250 if (dump_file)
8251 {
8252 fprintf (dump_file, "\n");
8253 dump_constraints (dump_file, from);
8254 fprintf (dump_file, "\n");
8255 from = constraints.length ();
8256 }
8257 }
8258
8259 /* From the constraints compute the points-to sets. */
8260 solve_constraints ();
8261
8262 if (dump_file)
8263 dump_sa_points_to_info (dump_file);
8264
8265 /* Now post-process solutions to handle locals from different
8266 runtime instantiations coming in through recursive invocations. */
8267 unsigned shadow_var_cnt = 0;
8268 for (unsigned i = 1; i < varmap.length (); ++i)
8269 {
8270 varinfo_t fi = get_varinfo (i);
8271 if (fi->is_fn_info
8272 && fi->decl)
8273 /* Automatic variables pointed to by their containing functions
8274 parameters need this treatment. */
8275 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8276 ai; ai = vi_next (ai))
8277 {
8278 varinfo_t vi = get_varinfo (find (ai->id));
8279 bitmap_iterator bi;
8280 unsigned j;
8281 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8282 {
8283 varinfo_t pt = get_varinfo (j);
8284 if (pt->shadow_var_uid == 0
8285 && pt->decl
8286 && auto_var_in_fn_p (pt->decl, fi->decl))
8287 {
8288 pt->shadow_var_uid = allocate_decl_uid ();
8289 shadow_var_cnt++;
8290 }
8291 }
8292 }
8293 /* As well as global variables which are another way of passing
8294 arguments to recursive invocations. */
8295 else if (fi->is_global_var)
8296 {
8297 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8298 {
8299 varinfo_t vi = get_varinfo (find (ai->id));
8300 bitmap_iterator bi;
8301 unsigned j;
8302 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8303 {
8304 varinfo_t pt = get_varinfo (j);
8305 if (pt->shadow_var_uid == 0
8306 && pt->decl
8307 && auto_var_p (pt->decl))
8308 {
8309 pt->shadow_var_uid = allocate_decl_uid ();
8310 shadow_var_cnt++;
8311 }
8312 }
8313 }
8314 }
8315 }
8316 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8317 fprintf (dump_file, "Allocated %u shadow variables for locals "
8318 "maybe leaking into recursive invocations of their containing "
8319 "functions\n", shadow_var_cnt);
8320
8321 /* Compute the global points-to sets for ESCAPED.
8322 ??? Note that the computed escape set is not correct
8323 for the whole unit as we fail to consider graph edges to
8324 externally visible functions. */
8325 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8326
8327 /* Make sure the ESCAPED solution (which is used as placeholder in
8328 other solutions) does not reference itself. This simplifies
8329 points-to solution queries. */
8330 ipa_escaped_pt.ipa_escaped = 0;
8331
8332 /* Assign the points-to sets to the SSA names in the unit. */
8333 FOR_EACH_DEFINED_FUNCTION (node)
8334 {
8335 tree ptr;
8336 struct function *fn;
8337 unsigned i;
8338 basic_block bb;
8339
8340 /* Nodes without a body are not interesting. */
8341 if (!node->has_gimple_body_p () || node->clone_of)
8342 continue;
8343
8344 fn = DECL_STRUCT_FUNCTION (node->decl);
8345
8346 /* Compute the points-to sets for pointer SSA_NAMEs. */
8347 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8348 {
8349 if (ptr
8350 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8351 find_what_p_points_to (node->decl, ptr);
8352 }
8353
8354 /* Compute the call-use and call-clobber sets for indirect calls
8355 and calls to external functions. */
8356 FOR_EACH_BB_FN (bb, fn)
8357 {
8358 gimple_stmt_iterator gsi;
8359
8360 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8361 {
8362 gcall *stmt;
8363 struct pt_solution *pt;
8364 varinfo_t vi, fi;
8365 tree decl;
8366
8367 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8368 if (!stmt)
8369 continue;
8370
8371 /* Handle direct calls to functions with body. */
8372 decl = gimple_call_fndecl (stmt);
8373
8374 {
8375 tree called_decl = NULL_TREE;
8376 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8377 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8378 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8379 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8380
8381 if (called_decl != NULL_TREE
8382 && !fndecl_maybe_in_other_partition (called_decl))
8383 decl = called_decl;
8384 }
8385
8386 if (decl
8387 && (fi = lookup_vi_for_tree (decl))
8388 && fi->is_fn_info)
8389 {
8390 *gimple_call_clobber_set (stmt)
8391 = find_what_var_points_to
8392 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8393 *gimple_call_use_set (stmt)
8394 = find_what_var_points_to
8395 (node->decl, first_vi_for_offset (fi, fi_uses));
8396 }
8397 /* Handle direct calls to external functions. */
8398 else if (decl && (!fi || fi->decl))
8399 {
8400 pt = gimple_call_use_set (stmt);
8401 if (gimple_call_flags (stmt) & ECF_CONST)
8402 memset (pt, 0, sizeof (struct pt_solution));
8403 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8404 {
8405 *pt = find_what_var_points_to (node->decl, vi);
8406 /* Escaped (and thus nonlocal) variables are always
8407 implicitly used by calls. */
8408 /* ??? ESCAPED can be empty even though NONLOCAL
8409 always escaped. */
8410 pt->nonlocal = 1;
8411 pt->ipa_escaped = 1;
8412 }
8413 else
8414 {
8415 /* If there is nothing special about this call then
8416 we have made everything that is used also escape. */
8417 *pt = ipa_escaped_pt;
8418 pt->nonlocal = 1;
8419 }
8420
8421 pt = gimple_call_clobber_set (stmt);
8422 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8423 memset (pt, 0, sizeof (struct pt_solution));
8424 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8425 {
8426 *pt = find_what_var_points_to (node->decl, vi);
8427 /* Escaped (and thus nonlocal) variables are always
8428 implicitly clobbered by calls. */
8429 /* ??? ESCAPED can be empty even though NONLOCAL
8430 always escaped. */
8431 pt->nonlocal = 1;
8432 pt->ipa_escaped = 1;
8433 }
8434 else
8435 {
8436 /* If there is nothing special about this call then
8437 we have made everything that is used also escape. */
8438 *pt = ipa_escaped_pt;
8439 pt->nonlocal = 1;
8440 }
8441 }
8442 /* Handle indirect calls. */
8443 else if ((fi = get_fi_for_callee (stmt)))
8444 {
8445 /* We need to accumulate all clobbers/uses of all possible
8446 callees. */
8447 fi = get_varinfo (find (fi->id));
8448 /* If we cannot constrain the set of functions we'll end up
8449 calling we end up using/clobbering everything. */
8450 if (bitmap_bit_p (fi->solution, anything_id)
8451 || bitmap_bit_p (fi->solution, nonlocal_id)
8452 || bitmap_bit_p (fi->solution, escaped_id))
8453 {
8454 pt_solution_reset (gimple_call_clobber_set (stmt));
8455 pt_solution_reset (gimple_call_use_set (stmt));
8456 }
8457 else
8458 {
8459 bitmap_iterator bi;
8460 unsigned i;
8461 struct pt_solution *uses, *clobbers;
8462
8463 uses = gimple_call_use_set (stmt);
8464 clobbers = gimple_call_clobber_set (stmt);
8465 memset (uses, 0, sizeof (struct pt_solution));
8466 memset (clobbers, 0, sizeof (struct pt_solution));
8467 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8468 {
8469 struct pt_solution sol;
8470
8471 vi = get_varinfo (i);
8472 if (!vi->is_fn_info)
8473 {
8474 /* ??? We could be more precise here? */
8475 uses->nonlocal = 1;
8476 uses->ipa_escaped = 1;
8477 clobbers->nonlocal = 1;
8478 clobbers->ipa_escaped = 1;
8479 continue;
8480 }
8481
8482 if (!uses->anything)
8483 {
8484 sol = find_what_var_points_to
8485 (node->decl,
8486 first_vi_for_offset (vi, fi_uses));
8487 pt_solution_ior_into (uses, &sol);
8488 }
8489 if (!clobbers->anything)
8490 {
8491 sol = find_what_var_points_to
8492 (node->decl,
8493 first_vi_for_offset (vi, fi_clobbers));
8494 pt_solution_ior_into (clobbers, &sol);
8495 }
8496 }
8497 }
8498 }
8499 else
8500 gcc_unreachable ();
8501 }
8502 }
8503
8504 fn->gimple_df->ipa_pta = true;
8505
8506 /* We have to re-set the final-solution cache after each function
8507 because what is a "global" is dependent on function context. */
8508 final_solutions->empty ();
8509 obstack_free (&final_solutions_obstack, NULL);
8510 gcc_obstack_init (&final_solutions_obstack);
8511 }
8512
8513 delete_points_to_sets ();
8514
8515 in_ipa_mode = 0;
8516
8517 return 0;
8518 }
8519
8520 namespace {
8521
/* Pass descriptor for the interprocedural points-to analysis (IPA PTA)
   pass.  Registered with the pass manager via make_pass_ipa_pta below;
   the fields follow the pass_data layout declared in tree-pass.h.  */
const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
8534
8535 class pass_ipa_pta : public simple_ipa_opt_pass
8536 {
8537 public:
8538 pass_ipa_pta (gcc::context *ctxt)
8539 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8540 {}
8541
8542 /* opt_pass methods: */
8543 virtual bool gate (function *)
8544 {
8545 return (optimize
8546 && flag_ipa_pta
8547 /* Don't bother doing anything if the program has errors. */
8548 && !seen_error ());
8549 }
8550
8551 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8552
8553 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8554
8555 }; // class pass_ipa_pta
8556
8557 } // anon namespace
8558
8559 simple_ipa_opt_pass *
8560 make_pass_ipa_pta (gcc::context *ctxt)
8561 {
8562 return new pass_ipa_pta (ctxt);
8563 }