]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-structalias.c
In PR70010, a function is marked with target(no-vsx) to disable VSX code
[thirdparty/gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
42 #include "varasm.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "tree-ssa.h"
46 #include "tree-cfg.h"
47
48 /* The idea behind this analyzer is to generate set constraints from the
49 program, then solve the resulting constraints in order to generate the
50 points-to sets.
51
52 Set constraints are a way of modeling program analysis problems that
53 involve sets. They consist of an inclusion constraint language,
54 describing the variables (each variable is a set) and operations that
55 are involved on the variables, and a set of rules that derive facts
56 from these operations. To solve a system of set constraints, you derive
57 all possible facts under the rules, which gives you the correct sets
58 as a consequence.
59
60 See "Efficient Field-sensitive pointer analysis for C" by "David
61 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
62 http://citeseer.ist.psu.edu/pearce04efficient.html
63
64 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
65    of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
66 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67
68 There are three types of real constraint expressions, DEREF,
69 ADDRESSOF, and SCALAR. Each constraint expression consists
70 of a constraint type, a variable, and an offset.
71
72 SCALAR is a constraint expression type used to represent x, whether
73 it appears on the LHS or the RHS of a statement.
74 DEREF is a constraint expression type used to represent *x, whether
75 it appears on the LHS or the RHS of a statement.
76 ADDRESSOF is a constraint expression used to represent &x, whether
77 it appears on the LHS or the RHS of a statement.
78
79 Each pointer variable in the program is assigned an integer id, and
80 each field of a structure variable is assigned an integer id as well.
81
82 Structure variables are linked to their list of fields through a "next
83 field" in each variable that points to the next field in offset
84 order.
85 Each variable for a structure field has
86
87 1. "size", that tells the size in bits of that field.
88    2. "fullsize", that tells the size in bits of the entire structure.
89 3. "offset", that tells the offset in bits from the beginning of the
90 structure to this field.
91
92 Thus,
93 struct f
94 {
95 int a;
96 int b;
97 } foo;
98 int *bar;
99
100 looks like
101
102 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
103 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
104 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
105
106
107 In order to solve the system of set constraints, the following is
108 done:
109
110 1. Each constraint variable x has a solution set associated with it,
111 Sol(x).
112
113 2. Constraints are separated into direct, copy, and complex.
114 Direct constraints are ADDRESSOF constraints that require no extra
115 processing, such as P = &Q
116 Copy constraints are those of the form P = Q.
117 Complex constraints are all the constraints involving dereferences
118 and offsets (including offsetted copies).
119
120 3. All direct constraints of the form P = &Q are processed, such
121 that Q is added to Sol(P)
122
123 4. All complex constraints for a given constraint variable are stored in a
124 linked list attached to that variable's node.
125
126 5. A directed graph is built out of the copy constraints. Each
127 constraint variable is a node in the graph, and an edge from
128 Q to P is added for each copy constraint of the form P = Q
129
130 6. The graph is then walked, and solution sets are
131 propagated along the copy edges, such that an edge from Q to P
132 causes Sol(P) <- Sol(P) union Sol(Q).
133
134 7. As we visit each node, all complex constraints associated with
135 that node are processed by adding appropriate copy edges to the graph, or the
136 appropriate variables to the solution set.
137
138 8. The process of walking the graph is iterated until no solution
139 sets change.
140
141 Prior to walking the graph in steps 6 and 7, We perform static
142 cycle elimination on the constraint graph, as well
143 as off-line variable substitution.
144
145 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
146 on and turned into anything), but isn't. You can just see what offset
147 inside the pointed-to struct it's going to access.
148
149 TODO: Constant bounded arrays can be handled as if they were structs of the
150 same number of elements.
151
152 TODO: Modeling heap and incoming pointers becomes much better if we
153 add fields to them as we discover them, which we could do.
154
155 TODO: We could handle unions, but to be honest, it's probably not
156 worth the pain or slowdown. */
157
158 /* IPA-PTA optimizations possible.
159
160 When the indirect function called is ANYTHING we can add disambiguation
161 based on the function signatures (or simply the parameter count which
162 is the varinfo size). We also do not need to consider functions that
163 do not have their address taken.
164
165 The is_global_var bit which marks escape points is overly conservative
166 in IPA mode. Split it to is_escape_point and is_global_var - only
167 externally visible globals are escape points in IPA mode.
168 There is now is_ipa_escape_point but this is only used in a few
169 selected places.
170
171 The way we introduce DECL_PT_UID to avoid fixing up all points-to
172 sets in the translation unit when we copy a DECL during inlining
173 pessimizes precision. The advantage is that the DECL_PT_UID keeps
174 compile-time and memory usage overhead low - the points-to sets
175 do not grow or get unshared as they would during a fixup phase.
176 An alternative solution is to delay IPA PTA until after all
177 inlining transformations have been applied.
178
179 The way we propagate clobber/use information isn't optimized.
180 It should use a new complex constraint that properly filters
181 out local variables of the callee (though that would make
182 the sets invalid after inlining). OTOH we might as well
183 admit defeat to WHOPR and simply do all the clobber/use analysis
184 and propagation after PTA finished but before we threw away
185 points-to information for memory variables. WHOPR and PTA
186 do not play along well anyway - the whole constraint solving
187 would need to be done in WPA phase and it will be very interesting
188 to apply the results to local SSA names during LTRANS phase.
189
190 We probably should compute a per-function unit-ESCAPE solution
191 propagating it simply like the clobber / uses solutions. The
192    solution can go alongside the non-IPA escaped solution and be
193 used to query which vars escape the unit through a function.
194 This is also required to make the escaped-HEAP trick work in IPA mode.
195
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
198
199 And probably more. */
200
201 static bool use_field_sensitive = true;
202 static int in_ipa_mode = 0;
203
204 /* Used for predecessor bitmaps. */
205 static bitmap_obstack predbitmap_obstack;
206
207 /* Used for points-to sets. */
208 static bitmap_obstack pta_obstack;
209
210 /* Used for oldsolution members of variables. */
211 static bitmap_obstack oldpta_obstack;
212
213 /* Used for per-solver-iteration bitmaps. */
214 static bitmap_obstack iteration_obstack;
215
216 static unsigned int create_variable_info_for (tree, const char *, bool);
217 typedef struct constraint_graph *constraint_graph_t;
218 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
219
220 struct constraint;
221 typedef struct constraint *constraint_t;
222
223
224 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
225 if (a) \
226 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
227
/* Counters collected while building and solving the constraint system.
   total_vars is incremented in new_var_info; the remaining members are
   updated elsewhere in this file (solver and graph-building code).  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
239
/* A constraint variable.  Besides user variables these also represent
   artificial variables and individual fields of structures; the fields
   of one base variable are chained in offset order via NEXT/HEAD.  */
struct variable_info
{
  /* ID of this variable; index into the varmap vector.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
321
322 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
323 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
324 unsigned HOST_WIDE_INT);
325 static varinfo_t lookup_vi_for_tree (tree);
326 static inline bool type_can_have_subvars (const_tree);
327 static void make_param_constraints (varinfo_t);
328
329 /* Pool of variable info structures. */
330 static object_allocator<variable_info> variable_info_pool
331 ("Variable info pool");
332
333 /* Map varinfo to final pt_solution. */
334 static hash_map<varinfo_t, pt_solution *> *final_solutions;
335 struct obstack final_solutions_obstack;
336
337 /* Table of variable info structures for constraint variables.
338 Indexed directly by variable info id. */
339 static vec<varinfo_t> varmap;
340
341 /* Return the varmap element N */
342
343 static inline varinfo_t
344 get_varinfo (unsigned int n)
345 {
346 return varmap[n];
347 }
348
349 /* Return the next variable in the list of sub-variables of VI
350 or NULL if VI is the last sub-variable. */
351
352 static inline varinfo_t
353 vi_next (varinfo_t vi)
354 {
355 return get_varinfo (vi->next);
356 }
357
358 /* Static IDs for the special variables. Variable ID zero is unused
359 and used as terminator for the sub-variable chain. */
360 enum { nothing_id = 1, anything_id = 2, string_id = 3,
361 escaped_id = 4, nonlocal_id = 5,
362 storedanything_id = 6, integer_id = 7 };
363
364 /* Return a new variable info structure consisting for a variable
365 named NAME, and using constraint graph node NODE. Append it
366 to the vector of variable info structures. */
367
368 static varinfo_t
369 new_var_info (tree t, const char *name, bool add_id)
370 {
371 unsigned index = varmap.length ();
372 varinfo_t ret = variable_info_pool.allocate ();
373
374 if (dump_file && add_id)
375 {
376 char *tempname = xasprintf ("%s(%d)", name, index);
377 name = ggc_strdup (tempname);
378 free (tempname);
379 }
380
381 ret->id = index;
382 ret->name = name;
383 ret->decl = t;
384 /* Vars without decl are artificial and do not have sub-variables. */
385 ret->is_artificial_var = (t == NULL_TREE);
386 ret->is_special_var = false;
387 ret->is_unknown_size_var = false;
388 ret->is_full_var = (t == NULL_TREE);
389 ret->is_heap_var = false;
390 ret->may_have_pointers = true;
391 ret->only_restrict_pointers = false;
392 ret->is_restrict_var = false;
393 ret->ruid = 0;
394 ret->is_global_var = (t == NULL_TREE);
395 ret->is_ipa_escape_point = false;
396 ret->is_fn_info = false;
397 if (t && DECL_P (t))
398 ret->is_global_var = (is_global_var (t)
399 /* We have to treat even local register variables
400 as escape points. */
401 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
402 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
403 ret->solution = BITMAP_ALLOC (&pta_obstack);
404 ret->oldsolution = NULL;
405 ret->next = 0;
406 ret->shadow_var_uid = 0;
407 ret->head = ret->id;
408
409 stats.total_vars++;
410
411 varmap.safe_push (ret);
412
413 return ret;
414 }
415
416 /* A map mapping call statements to per-stmt variables for uses
417 and clobbers specific to the call. */
418 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
419
/* Lookup or create the variable for the call statement CALL.  Two
   artificial variables are created per call: one for the uses
   ("CALLUSED") and one for the clobbers ("CALLCLOBBERED").  They are
   chained via the sub-variable chain (uses first), so the clobber
   info is reachable via vi_next from the use info; only the use info
   is recorded in the call_stmt_vars map.  */

static varinfo_t
get_call_vi (gcall *call)
{
  varinfo_t vi, vi2;

  bool existed;
  varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
  if (existed)
    return *slot_p;

  /* The use variable is "offset" 0 of a fake two-bit-sized object.  */
  vi = new_var_info (NULL_TREE, "CALLUSED", true);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;
  vi->is_reg_var = true;

  /* ... and the clobber variable "offset" 1 of the same object.  */
  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;
  vi2->is_reg_var = true;

  /* Chain the clobber info after the use info.  */
  vi->next = vi2->id;

  *slot_p = vi;
  return vi;
}
451
452 /* Lookup the variable for the call statement CALL representing
453 the uses. Returns NULL if there is nothing special about this call. */
454
455 static varinfo_t
456 lookup_call_use_vi (gcall *call)
457 {
458 varinfo_t *slot_p = call_stmt_vars->get (call);
459 if (slot_p)
460 return *slot_p;
461
462 return NULL;
463 }
464
465 /* Lookup the variable for the call statement CALL representing
466 the clobbers. Returns NULL if there is nothing special about this call. */
467
468 static varinfo_t
469 lookup_call_clobber_vi (gcall *call)
470 {
471 varinfo_t uses = lookup_call_use_vi (call);
472 if (!uses)
473 return NULL;
474
475 return vi_next (uses);
476 }
477
478 /* Lookup or create the variable for the call statement CALL representing
479 the uses. */
480
481 static varinfo_t
482 get_call_use_vi (gcall *call)
483 {
484 return get_call_vi (call);
485 }
486
487 /* Lookup or create the variable for the call statement CALL representing
488 the clobbers. */
489
490 static varinfo_t ATTRIBUTE_UNUSED
491 get_call_clobber_vi (gcall *call)
492 {
493 return vi_next (get_call_vi (call));
494 }
495
496
/* The three kinds of constraint expressions: x (SCALAR), *x (DEREF)
   and &x (ADDRESSOF).  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint; id into varmap.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
519
520 typedef struct constraint_expr ce_s;
521 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
522 static void get_constraint_for (tree, vec<ce_s> *);
523 static void get_constraint_for_rhs (tree, vec<ce_s> *);
524 static void do_deref (vec<ce_s> *);
525
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  /* Left-hand side expression of the constraint.  */
  struct constraint_expr lhs;
  /* Right-hand side expression of the constraint.  */
  struct constraint_expr rhs;
};
537
538 /* List of constraints that we use to build the constraint graph from. */
539
540 static vec<constraint_t> constraints;
541 static object_allocator<constraint> constraint_pool ("Constraint pool");
542
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  The per-node arrays below are all
   indexed by constraint-variable / graph-node id.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};
616
617 static constraint_graph_t graph;
618
619 /* During variable substitution and the offline version of indirect
620 cycle finding, we create nodes to represent dereferences and
621 address taken constraints. These represent where these start and
622 end. */
623 #define FIRST_REF_NODE (varmap).length ()
624 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
625
626 /* Return the representative node for NODE, if NODE has been unioned
627 with another NODE.
628 This function performs path compression along the way to finding
629 the representative. */
630
631 static unsigned int
632 find (unsigned int node)
633 {
634 gcc_checking_assert (node < graph->size);
635 if (graph->rep[node] != node)
636 return graph->rep[node] = find (graph->rep[node]);
637 return node;
638 }
639
640 /* Union the TO and FROM nodes to the TO nodes.
641 Note that at some point in the future, we may want to do
642 union-by-rank, in which case we are going to have to return the
643 node we unified to. */
644
645 static bool
646 unite (unsigned int to, unsigned int from)
647 {
648 gcc_checking_assert (to < graph->size && from < graph->size);
649 if (to != from && graph->rep[from] != to)
650 {
651 graph->rep[from] = to;
652 return true;
653 }
654 return false;
655 }
656
657 /* Create a new constraint consisting of LHS and RHS expressions. */
658
659 static constraint_t
660 new_constraint (const struct constraint_expr lhs,
661 const struct constraint_expr rhs)
662 {
663 constraint_t ret = constraint_pool.allocate ();
664 ret->lhs = lhs;
665 ret->rhs = rhs;
666 return ret;
667 }
668
669 /* Print out constraint C to FILE. */
670
671 static void
672 dump_constraint (FILE *file, constraint_t c)
673 {
674 if (c->lhs.type == ADDRESSOF)
675 fprintf (file, "&");
676 else if (c->lhs.type == DEREF)
677 fprintf (file, "*");
678 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
679 if (c->lhs.offset == UNKNOWN_OFFSET)
680 fprintf (file, " + UNKNOWN");
681 else if (c->lhs.offset != 0)
682 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
683 fprintf (file, " = ");
684 if (c->rhs.type == ADDRESSOF)
685 fprintf (file, "&");
686 else if (c->rhs.type == DEREF)
687 fprintf (file, "*");
688 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
689 if (c->rhs.offset == UNKNOWN_OFFSET)
690 fprintf (file, " + UNKNOWN");
691 else if (c->rhs.offset != 0)
692 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
693 }
694
695
696 void debug_constraint (constraint_t);
697 void debug_constraints (void);
698 void debug_constraint_graph (void);
699 void debug_solution_for_var (unsigned int);
700 void debug_sa_points_to_info (void);
701 void debug_varinfo (varinfo_t);
702 void debug_varmap (void);
703
704 /* Print out constraint C to stderr. */
705
706 DEBUG_FUNCTION void
707 debug_constraint (constraint_t c)
708 {
709 dump_constraint (stderr, c);
710 fprintf (stderr, "\n");
711 }
712
713 /* Print out all constraints to FILE */
714
715 static void
716 dump_constraints (FILE *file, int from)
717 {
718 int i;
719 constraint_t c;
720 for (i = from; constraints.iterate (i, &c); i++)
721 if (c)
722 {
723 dump_constraint (file, c);
724 fprintf (file, "\n");
725 }
726 }
727
728 /* Print out all constraints to stderr. */
729
730 DEBUG_FUNCTION void
731 debug_constraints (void)
732 {
733 dump_constraints (stderr, 0);
734 }
735
/* Print the constraint graph in dot format.  Nodes that have been
   unified are folded into their representative; REF nodes (ids at or
   above FIRST_REF_NODE) are printed as "*name" of the variable they
   dereference.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Only representatives are printed; unified nodes are folded.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Label the node with its name plus its complex constraints,
	     one per left-justified line (dot "\l" escapes).  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after unification) are not printed.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
810
811 /* Print out the constraint graph to stderr. */
812
813 DEBUG_FUNCTION void
814 debug_constraint_graph (void)
815 {
816 dump_constraint_graph (stderr);
817 }
818
819 /* SOLVER FUNCTIONS
820
821 The solver is a simple worklist solver, that works on the following
822 algorithm:
823
824 sbitmap changed_nodes = all zeroes;
825 changed_count = 0;
826 For each node that is not already collapsed:
827 changed_count++;
828 set bit in changed nodes
829
830 while (changed_count > 0)
831 {
832 compute topological ordering for constraint graph
833
834 find and collapse cycles in the constraint graph (updating
835 changed if necessary)
836
837 for each node (n) in the graph in topological order:
838 changed_count--;
839
840 Process each complex constraint associated with the node,
841 updating changed if necessary.
842
843 For each outgoing edge from n, propagate the solution from n to
844 the destination of the edge, updating changed as necessary.
845
846 } */
847
848 /* Return true if two constraint expressions A and B are equal. */
849
850 static bool
851 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
852 {
853 return a.type == b.type && a.var == b.var && a.offset == b.offset;
854 }
855
856 /* Return true if constraint expression A is less than constraint expression
857 B. This is just arbitrary, but consistent, in order to give them an
858 ordering. */
859
860 static bool
861 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
862 {
863 if (a.type == b.type)
864 {
865 if (a.var == b.var)
866 return a.offset < b.offset;
867 else
868 return a.var < b.var;
869 }
870 else
871 return a.type < b.type;
872 }
873
874 /* Return true if constraint A is less than constraint B. This is just
875 arbitrary, but consistent, in order to give them an ordering. */
876
877 static bool
878 constraint_less (const constraint_t &a, const constraint_t &b)
879 {
880 if (constraint_expr_less (a->lhs, b->lhs))
881 return true;
882 else if (constraint_expr_less (b->lhs, a->lhs))
883 return false;
884 else
885 return constraint_expr_less (a->rhs, b->rhs);
886 }
887
888 /* Return true if two constraints A and B are equal. */
889
890 static bool
891 constraint_equal (struct constraint a, struct constraint b)
892 {
893 return constraint_expr_equal (a.lhs, b.lhs)
894 && constraint_expr_equal (a.rhs, b.rhs);
895 }
896
897
898 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
899
900 static constraint_t
901 constraint_vec_find (vec<constraint_t> vec,
902 struct constraint lookfor)
903 {
904 unsigned int place;
905 constraint_t found;
906
907 if (!vec.exists ())
908 return NULL;
909
910 place = vec.lower_bound (&lookfor, constraint_less);
911 if (place >= vec.length ())
912 return NULL;
913 found = vec[place];
914 if (!constraint_equal (*found, lookfor))
915 return NULL;
916 return found;
917 }
918
919 /* Union two constraint vectors, TO and FROM. Put the result in TO.
920 Returns true of TO set is changed. */
921
922 static bool
923 constraint_set_union (vec<constraint_t> *to,
924 vec<constraint_t> *from)
925 {
926 int i;
927 constraint_t c;
928 bool any_change = false;
929
930 FOR_EACH_VEC_ELT (*from, i, c)
931 {
932 if (constraint_vec_find (*to, *c) == NULL)
933 {
934 unsigned int place = to->lower_bound (c, constraint_less);
935 to->safe_insert (place, c);
936 any_change = true;
937 }
938 }
939 return any_change;
940 }
941
/* Expands the solution in SET to all sub-fields of variables included.
   The expanded set is cached in *EXPANDED (allocated on the
   per-iteration obstack) and returned; subsequent calls with the same
   cache return it unchanged.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  /* Return the cached expansion if we computed it already.  */
  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->head != j)
	continue;
      /* Walk the field chain and add every sub-field.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
981
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  *EXPANDED_DELTA caches the sub-field expansion of DELTA
   for the INC == UNKNOWN_OFFSET case.  Returns true if TO changed.  */

static bool
set_union_with_increment  (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			   bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
	         shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1047
1048 /* Insert constraint C into the list of complex constraints for graph
1049 node VAR. */
1050
1051 static void
1052 insert_into_complex (constraint_graph_t graph,
1053 unsigned int var, constraint_t c)
1054 {
1055 vec<constraint_t> complex = graph->complex[var];
1056 unsigned int place = complex.lower_bound (c, constraint_less);
1057
1058 /* Only insert constraints that do not already exist. */
1059 if (place >= complex.length ()
1060 || !constraint_equal (*c, *complex[place]))
1061 graph->complex[var].safe_insert (place, c);
1062 }
1063
1064
1065 /* Condense two variable nodes into a single variable node, by moving
1066 all associated info from FROM to TO. Returns true if TO node's
1067 constraint set changes after the merge. */
1068
1069 static bool
1070 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1071 unsigned int from)
1072 {
1073 unsigned int i;
1074 constraint_t c;
1075 bool any_change = false;
1076
1077 gcc_checking_assert (find (from) == to);
1078
1079 /* Move all complex constraints from src node into to node */
1080 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1081 {
1082 /* In complex constraints for node FROM, we may have either
1083 a = *FROM, and *FROM = a, or an offseted constraint which are
1084 always added to the rhs node's constraints. */
1085
1086 if (c->rhs.type == DEREF)
1087 c->rhs.var = to;
1088 else if (c->lhs.type == DEREF)
1089 c->lhs.var = to;
1090 else
1091 c->rhs.var = to;
1092
1093 }
1094 any_change = constraint_set_union (&graph->complex[to],
1095 &graph->complex[from]);
1096 graph->complex[from].release ();
1097 return any_change;
1098 }
1099
1100
1101 /* Remove edges involving NODE from GRAPH. */
1102
1103 static void
1104 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1105 {
1106 if (graph->succs[node])
1107 BITMAP_FREE (graph->succs[node]);
1108 }
1109
1110 /* Merge GRAPH nodes FROM and TO into node TO. */
1111
1112 static void
1113 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1114 unsigned int from)
1115 {
1116 if (graph->indirect_cycles[from] != -1)
1117 {
1118 /* If we have indirect cycles with the from node, and we have
1119 none on the to node, the to node has indirect cycles from the
1120 from node now that they are unified.
1121 If indirect cycles exist on both, unify the nodes that they
1122 are in a cycle with, since we know they are in a cycle with
1123 each other. */
1124 if (graph->indirect_cycles[to] == -1)
1125 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1126 }
1127
1128 /* Merge all the successor edges. */
1129 if (graph->succs[from])
1130 {
1131 if (!graph->succs[to])
1132 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1133 bitmap_ior_into (graph->succs[to],
1134 graph->succs[from]);
1135 }
1136
1137 clear_edges_for_node (graph, from);
1138 }
1139
1140
1141 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1142 it doesn't exist in the graph already. */
1143
1144 static void
1145 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1146 unsigned int from)
1147 {
1148 if (to == from)
1149 return;
1150
1151 if (!graph->implicit_preds[to])
1152 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1153
1154 if (bitmap_set_bit (graph->implicit_preds[to], from))
1155 stats.num_implicit_edges++;
1156 }
1157
/* Add a predecessor graph edge to GRAPH, recording FROM as a
   predecessor of TO, if it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Lazily allocate the predecessor bitmap for TO.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1170
1171 /* Add a graph edge to GRAPH, going from FROM to TO if
1172 it doesn't exist in the graph already.
1173 Return false if the edge already existed, true otherwise. */
1174
1175 static bool
1176 add_graph_edge (constraint_graph_t graph, unsigned int to,
1177 unsigned int from)
1178 {
1179 if (to == from)
1180 {
1181 return false;
1182 }
1183 else
1184 {
1185 bool r = false;
1186
1187 if (!graph->succs[from])
1188 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1189 if (bitmap_set_bit (graph->succs[from], to))
1190 {
1191 r = true;
1192 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1193 stats.num_edges++;
1194 }
1195 return r;
1196 }
1197 }
1198
1199
1200 /* Initialize the constraint graph structure to contain SIZE nodes. */
1201
1202 static void
1203 init_graph (unsigned int size)
1204 {
1205 unsigned int j;
1206
1207 graph = XCNEW (struct constraint_graph);
1208 graph->size = size;
1209 graph->succs = XCNEWVEC (bitmap, graph->size);
1210 graph->indirect_cycles = XNEWVEC (int, graph->size);
1211 graph->rep = XNEWVEC (unsigned int, graph->size);
1212 /* ??? Macros do not support template types with multiple arguments,
1213 so we use a typedef to work around it. */
1214 typedef vec<constraint_t> vec_constraint_t_heap;
1215 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1216 graph->pe = XCNEWVEC (unsigned int, graph->size);
1217 graph->pe_rep = XNEWVEC (int, graph->size);
1218
1219 for (j = 0; j < graph->size; j++)
1220 {
1221 graph->rep[j] = j;
1222 graph->pe_rep[j] = -1;
1223 graph->indirect_cycles[j] = -1;
1224 }
1225 }
1226
/* Build the constraint graph, adding only predecessor edges right now.
   Also initializes the set of direct nodes and records address-taken
   variables; this is the input to offline variable substitution.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special nodes start out direct; constraints processed
     below may demote them.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor and implicit edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Taking the address of any field makes every field of
		 the variable indirect.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the involved variable indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1329
/* Build the constraint graph, adding successor edges.  Run after
   variable substitution, so all node ids are mapped through find ().  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been removed (nulled) by earlier
	 rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node of y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed the initial solution directly.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: a plain copy edge.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1389
1390
/* Changed variables on the last iteration.  */
static bitmap changed;

/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already visited by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully processed.  */
  auto_sbitmap deleted;
  /* DFS discovery number of each node.  */
  unsigned int *dfs;
  /* Representative mapping established by SCC collapsing
     (used by condense_visit).  */
  unsigned int *node_mapping;
  /* Next free DFS number.  */
  int current_index;
  /* Stack of nodes not yet known to be roots of their SCC.  */
  auto_vec<unsigned> scc_stack;
};
1409
1410
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Successor bits beyond LAST_REF_NODE are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number upwards.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified: N is an SCC root iff
     its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every stacked node belonging to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle instead of being
		     unified structurally.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
1498
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  Merges graph edges,
   complex constraints and solutions, and frees FROM's bitmaps.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      /* New complex constraints mean TO must be reconsidered.  */
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's old solution is stale once its solution grew; only keep
	 it before the first solving iteration.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove the self edge a merged FROM->TO edge would have become.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1554
/* Information needed to compute the topological ordering of a graph.
   Filled in by topo_visit / compute_topo_order.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1565
1566
1567 /* Initialize and return a topological info structure. */
1568
1569 static struct topo_info *
1570 init_topo_info (void)
1571 {
1572 size_t size = graph->size;
1573 struct topo_info *ti = XNEW (struct topo_info);
1574 ti->visited = sbitmap_alloc (size);
1575 bitmap_clear (ti->visited);
1576 ti->topo_order.create (1);
1577 return ti;
1578 }
1579
1580
1581 /* Free the topological sort info pointed to by TI. */
1582
1583 static void
1584 free_topo_info (struct topo_info *ti)
1585 {
1586 sbitmap_free (ti->visited);
1587 ti->topo_order.release ();
1588 free (ti);
1589 }
1590
1591 /* Visit the graph in topological order, and store the order in the
1592 topo_info structure. */
1593
1594 static void
1595 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1596 unsigned int n)
1597 {
1598 bitmap_iterator bi;
1599 unsigned int j;
1600
1601 bitmap_set_bit (ti->visited, n);
1602
1603 if (graph->succs[n])
1604 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1605 {
1606 if (!bitmap_bit_p (ti->visited, j))
1607 topo_visit (graph, ti, j);
1608 }
1609
1610 ti->topo_order.safe_push (n);
1611 }
1612
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  *EXPANDED_DELTA caches the field-expanded
   DELTA for the UNKNOWN_OFFSET case.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* An access before the variable is clamped to its head.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1702
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  *EXPANDED_DELTA caches the
   field-expanded DELTA for the UNKNOWN_OFFSET case.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* A store before the variable is clamped to its head.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1802
1803 /* Handle a non-simple (simple meaning requires no iteration),
1804 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1805
1806 static void
1807 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1808 bitmap *expanded_delta)
1809 {
1810 if (c->lhs.type == DEREF)
1811 {
1812 if (c->rhs.type == ADDRESSOF)
1813 {
1814 gcc_unreachable ();
1815 }
1816 else
1817 {
1818 /* *x = y */
1819 do_ds_constraint (c, delta, expanded_delta);
1820 }
1821 }
1822 else if (c->rhs.type == DEREF)
1823 {
1824 /* x = *y */
1825 if (!(get_varinfo (c->lhs.var)->is_special_var))
1826 do_sd_constraint (graph, c, delta, expanded_delta);
1827 }
1828 else
1829 {
1830 bitmap tmp;
1831 bool flag = false;
1832
1833 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1834 && c->rhs.offset != 0 && c->lhs.offset == 0);
1835 tmp = get_varinfo (c->lhs.var)->solution;
1836
1837 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1838 expanded_delta);
1839
1840 if (flag)
1841 bitmap_set_bit (changed, c->lhs.var);
1842 }
1843 }
1844
1845 /* Initialize and return a new SCC info structure. */
1846
1847 scc_info::scc_info (size_t size) :
1848 visited (size), deleted (size), current_index (0), scc_stack (1)
1849 {
1850 bitmap_clear (visited);
1851 bitmap_clear (deleted);
1852 node_mapping = XNEWVEC (unsigned int, size);
1853 dfs = XCNEWVEC (unsigned int, size);
1854
1855 for (size_t i = 0; i < size; i++)
1856 node_mapping[i] = i;
1857 }
1858
1859 /* Free an SCC info structure pointed to by SI */
1860
1861 scc_info::~scc_info ()
1862 {
1863 free (node_mapping);
1864 free (dfs);
1865 }
1866
1867
1868 /* Find indirect cycles in GRAPH that occur, using strongly connected
1869 components, and note them in the indirect cycles map.
1870
1871 This technique comes from Ben Hardekopf and Calvin Lin,
1872 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1873 Lines of Code", submitted to PLDI 2007. */
1874
1875 static void
1876 find_indirect_cycles (constraint_graph_t graph)
1877 {
1878 unsigned int i;
1879 unsigned int size = graph->size;
1880 scc_info si (size);
1881
1882 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1883 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1884 scc_visit (graph, &si, i);
1885 }
1886
1887 /* Compute a topological ordering for GRAPH, and store the result in the
1888 topo_info structure TI. */
1889
1890 static void
1891 compute_topo_order (constraint_graph_t graph,
1892 struct topo_info *ti)
1893 {
1894 unsigned int i;
1895 unsigned int size = graph->size;
1896
1897 for (i = 0; i != size; ++i)
1898 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1899 topo_visit (graph, ti, i);
1900 }
1901
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Precomputed hash of LABELS.  */
  hashval_t hashcode;
  /* The assigned equivalence class; 0 means not yet assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1912
/* Equiv_class_label hashtable helpers.  */

struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
{
  /* Return the cached hash value of an entry.  */
  static inline hashval_t hash (const equiv_class_label *);
  /* Compare two entries by hash and label bitmap.  */
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1921
1922 /* Hash function for a equiv_class_label_t */
1923
1924 inline hashval_t
1925 equiv_class_hasher::hash (const equiv_class_label *ecl)
1926 {
1927 return ecl->hashcode;
1928 }
1929
1930 /* Equality function for two equiv_class_label_t's. */
1931
1932 inline bool
1933 equiv_class_hasher::equal (const equiv_class_label *eql1,
1934 const equiv_class_label *eql2)
1935 {
1936 return (eql1->hashcode == eql2->hashcode
1937 && bitmap_equal_p (eql1->labels, eql2->labels));
1938 }
1939
1940 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1941 classes. */
1942 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1943
1944 /* A hashtable for mapping a bitmap of labels->location equivalence
1945 classes. */
1946 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1947
1948 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1949 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1950 is equivalent to. */
1951
1952 static equiv_class_label *
1953 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1954 bitmap labels)
1955 {
1956 equiv_class_label **slot;
1957 equiv_class_label ecl;
1958
1959 ecl.labels = labels;
1960 ecl.hashcode = bitmap_hash (labels);
1961 slot = table->find_slot (&ecl, INSERT);
1962 if (!*slot)
1963 {
1964 *slot = XNEW (struct equiv_class_label);
1965 (*slot)->labels = labels;
1966 (*slot)->hashcode = ecl.hashcode;
1967 (*slot)->equivalence_class = 0;
1968 }
1969
1970 return *slot;
1971 }
1972
1973 /* Perform offline variable substitution.
1974
1975 This is a worst case quadratic time way of identifying variables
1976 that must have equivalent points-to sets, including those caused by
1977 static cycles, and single entry subgraphs, in the constraint graph.
1978
1979 The technique is described in "Exploiting Pointer and Location
1980 Equivalence to Optimize Pointer Analysis. In the 14th International
1981 Static Analysis Symposium (SAS), August 2007." It is known as the
1982 "HU" algorithm, and is equivalent to value numbering the collapsed
1983 constraint graph including evaluating unions.
1984
1985 The general method of finding equivalence classes is as follows:
1986 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1987 Initialize all non-REF nodes to be direct nodes.
1988 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1989 variable}
1990 For each constraint containing the dereference, we also do the same
1991 thing.
1992
1993 We then compute SCC's in the graph and unify nodes in the same SCC,
1994 including pts sets.
1995
1996 For each non-collapsed node x:
1997 Visit all unvisited explicit incoming edges.
     Ignoring all non-pointers, set pts(x) = Union of pts(y) for each y
     where y->x.
2000 Lookup the equivalence class for pts(x).
2001 If we found one, equivalence_class(x) = found class.
2002 Otherwise, equivalence_class(x) = new class, and new_class is
2003 added to the lookup table.
2004
2005 All direct nodes with the same equivalence class can be replaced
2006 with a single representative node.
2007 All unlabeled nodes (label == 0) are not pointers and all edges
2008 involving them can be eliminated.
2009 We perform these optimizations during rewrite_constraints
2010
2011 In addition to pointer equivalence class finding, we also perform
2012 location equivalence class finding. This is the set of variables
2013 that always appear together in points-to sets. We use this to
2014 compress the size of the points-to sets. */
2015
2016 /* Current maximum pointer equivalence class id. */
2017 static int pointer_equiv_class;
2018
2019 /* Current maximum location equivalence class id. */
2020 static int location_equiv_class;
2021
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   graph (explicit and implicit edges) and collapses each SCC onto
   its root node N via SI->node_mapping.  */

static void
condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number upwards.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified: N is an SCC root iff
     its DFS number was not lowered by any reachable node.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () != 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* Find the first node of the SCC and do non-bitmap work.  */
	  bool direct_p = true;
	  unsigned first = si->scc_stack.length ();
	  do
	    {
	      --first;
	      unsigned int w = si->scc_stack[first];
	      si->node_mapping[w] = n;
	      if (!bitmap_bit_p (graph->direct_nodes, w))
		direct_p = false;
	    }
	  while (first > 0
		 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
	  /* An SCC containing any indirect node makes the whole
	     collapsed node indirect.  */
	  if (!direct_p)
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Want to reduce to node n, push that first.  */
	  si->scc_stack.reserve (1);
	  si->scc_stack.quick_push (si->scc_stack[first]);
	  si->scc_stack[first] = n;

	  /* Merge the members' bitmaps pairwise, halving the number of
	     live bitmaps each round, which keeps the unions balanced.  */
	  unsigned scc_size = si->scc_stack.length () - first;
	  unsigned split = scc_size / 2;
	  unsigned carry = scc_size - split * 2;
	  while (split > 0)
	    {
	      for (unsigned i = 0; i < split; ++i)
		{
		  unsigned a = si->scc_stack[first + i];
		  unsigned b = si->scc_stack[first + split + carry + i];

		  /* Unify our nodes.  */
		  if (graph->preds[b])
		    {
		      if (!graph->preds[a])
			std::swap (graph->preds[a], graph->preds[b]);
		      else
			bitmap_ior_into_and_free (graph->preds[a],
						  &graph->preds[b]);
		    }
		  if (graph->implicit_preds[b])
		    {
		      if (!graph->implicit_preds[a])
			std::swap (graph->implicit_preds[a],
				   graph->implicit_preds[b]);
		      else
			bitmap_ior_into_and_free (graph->implicit_preds[a],
						  &graph->implicit_preds[b]);
		    }
		  if (graph->points_to[b])
		    {
		      if (!graph->points_to[a])
			std::swap (graph->points_to[a], graph->points_to[b]);
		      else
			bitmap_ior_into_and_free (graph->points_to[a],
						  &graph->points_to[b]);
		    }
		}
	      unsigned remain = split + carry;
	      split = remain / 2;
	      carry = remain - split * 2;
	    }
	  /* Actually pop the SCC.  */
	  si->scc_stack.truncate (first);
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2147
2148 /* Label pointer equivalences.
2149
2150 This performs a value numbering of the constraint graph to
2151 discover which variables will always have the same points-to sets
2152 under the current set of constraints.
2153
2154 The way it value numbers is to store the set of points-to bits
2155 generated by the constraints and graph edges. This is just used as a
2156 hash and equality comparison. The *actual set of points-to bits* is
2157 completely irrelevant, in that we don't care about being able to
2158 extract them later.
2159
2160 The equality values (currently bitmaps) just have to satisfy a few
2161 constraints, the main ones being:
2162 1. The combining operation must be order independent.
2163 2. The end result of a given set of operations must be unique iff the
2164 combination of input values is unique
2165 3. Hashable. */
2166
/* Assign a pointer-equivalence label to node N, visiting unvisited
   predecessors first (depth-first).  N's "value" is the union of its
   predecessors' points-to sets; nodes whose sets hash equal share a
   label and may later be collapsed.  */

static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges' points-to sets.  FIRST_PRED
     remembers the first contributing predecessor so allocation of a
     bitmap for N can be delayed until a second contributor shows up.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges (self-edges and non-pointer predecessors,
	 whose label is 0).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  /* Second contributor: now allocate and union the
		     first contributor's set with this one.  */
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* Adding FIRST_REF_NODE + n makes the set unique to N, so an
	 indirect node never shares a label with another node.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same; share its label and its bitmap.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      /* Look up (or create) the label for this set; if the set was
	 seen before, drop our copy and share the stored bitmap.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2251
2252 /* Print the pred graph in dot format. */
2253
2254 static void
2255 dump_pred_graph (class scc_info *si, FILE *file)
2256 {
2257 unsigned int i;
2258
2259 /* Only print the graph if it has already been initialized: */
2260 if (!graph)
2261 return;
2262
2263 /* Prints the header of the dot file: */
2264 fprintf (file, "strict digraph {\n");
2265 fprintf (file, " node [\n shape = box\n ]\n");
2266 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2267 fprintf (file, "\n // List of nodes and complex constraints in "
2268 "the constraint graph:\n");
2269
2270 /* The next lines print the nodes in the graph together with the
2271 complex constraints attached to them. */
2272 for (i = 1; i < graph->size; i++)
2273 {
2274 if (i == FIRST_REF_NODE)
2275 continue;
2276 if (si->node_mapping[i] != i)
2277 continue;
2278 if (i < FIRST_REF_NODE)
2279 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2280 else
2281 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2282 if (graph->points_to[i]
2283 && !bitmap_empty_p (graph->points_to[i]))
2284 {
2285 if (i < FIRST_REF_NODE)
2286 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2287 else
2288 fprintf (file, "[label=\"*%s = {",
2289 get_varinfo (i - FIRST_REF_NODE)->name);
2290 unsigned j;
2291 bitmap_iterator bi;
2292 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2293 fprintf (file, " %d", j);
2294 fprintf (file, " }\"]");
2295 }
2296 fprintf (file, ";\n");
2297 }
2298
2299 /* Go over the edges. */
2300 fprintf (file, "\n // Edges in the constraint graph:\n");
2301 for (i = 1; i < graph->size; i++)
2302 {
2303 unsigned j;
2304 bitmap_iterator bi;
2305 if (si->node_mapping[i] != i)
2306 continue;
2307 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2308 {
2309 unsigned from = si->node_mapping[j];
2310 if (from < FIRST_REF_NODE)
2311 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2312 else
2313 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2314 fprintf (file, " -> ");
2315 if (i < FIRST_REF_NODE)
2316 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2317 else
2318 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2319 fprintf (file, ";\n");
2320 }
2321 }
2322
2323 /* Prints the tail of the dot file. */
2324 fprintf (file, "}\n");
2325 }
2326
2327 /* Perform offline variable substitution, discovering equivalence
2328 classes, and eliminating non-pointer variables. */
2329
static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Tables mapping an equivalence-defining bitmap to a class number.
     Class number 0 is reserved to mean "non-pointer".  */
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The table owns the bitmap for an existing class.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump the discovered SCC mappings and equivalence classes.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer-equivalence label 0 means the variable never points
	 to anything, so its edges are irrelevant to the solution.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2459
2460 /* Free information that was only necessary for variable
2461 substitution. */
2462
static void
free_var_substitution_info (class scc_info *si)
{
  /* SI and the per-node label/mapping arrays are only needed while
     rewriting constraints; the solver proper works from the successor
     graph and the complex-constraint lists.  */
  delete si;
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  /* Releasing the obstack frees all bitmaps allocated from it,
     including the pointed_by sets built during labeling.  */
  bitmap_obstack_release (&iteration_obstack);
}
2479
2480 /* Return an existing node that is equivalent to NODE, which has
2481 equivalence class LABEL, if one exists. Return NODE otherwise. */
2482
2483 static unsigned int
2484 find_equivalent_node (constraint_graph_t graph,
2485 unsigned int node, unsigned int label)
2486 {
2487 /* If the address version of this variable is unused, we can
2488 substitute it for anything else with the same label.
2489 Otherwise, we know the pointers are equivalent, but not the
2490 locations, and we can unite them later. */
2491
2492 if (!bitmap_bit_p (graph->address_taken, node))
2493 {
2494 gcc_checking_assert (label < graph->size);
2495
2496 if (graph->eq_rep[label] != -1)
2497 {
2498 /* Unify the two variables since we know they are equivalent. */
2499 if (unite (graph->eq_rep[label], node))
2500 unify_nodes (graph, graph->eq_rep[label], node, false);
2501 return graph->eq_rep[label];
2502 }
2503 else
2504 {
2505 graph->eq_rep[label] = node;
2506 graph->pe_rep[label] = node;
2507 }
2508 }
2509 else
2510 {
2511 gcc_checking_assert (label < graph->size);
2512 graph->pe[node] = label;
2513 if (graph->pe_rep[label] == -1)
2514 graph->pe_rep[label] = node;
2515 }
2516
2517 return node;
2518 }
2519
2520 /* Unite pointer equivalent but not location equivalent nodes in
2521 GRAPH. This may only be performed once variable substitution is
2522 finished. */
2523
2524 static void
2525 unite_pointer_equivalences (constraint_graph_t graph)
2526 {
2527 unsigned int i;
2528
2529 /* Go through the pointer equivalences and unite them to their
2530 representative, if they aren't already. */
2531 for (i = 1; i < FIRST_REF_NODE; i++)
2532 {
2533 unsigned int label = graph->pe[i];
2534 if (label)
2535 {
2536 int label_rep = graph->pe_rep[label];
2537
2538 if (label_rep == -1)
2539 continue;
2540
2541 label_rep = find (label_rep);
2542 if (label_rep >= 0 && unite (label_rep, find (i)))
2543 unify_nodes (graph, label_rep, i, false);
2544 }
2545 }
2546 }
2547
2548 /* Move complex constraints to the GRAPH nodes they belong to. */
2549
2550 static void
2551 move_complex_constraints (constraint_graph_t graph)
2552 {
2553 int i;
2554 constraint_t c;
2555
2556 FOR_EACH_VEC_ELT (constraints, i, c)
2557 {
2558 if (c)
2559 {
2560 struct constraint_expr lhs = c->lhs;
2561 struct constraint_expr rhs = c->rhs;
2562
2563 if (lhs.type == DEREF)
2564 {
2565 insert_into_complex (graph, lhs.var, c);
2566 }
2567 else if (rhs.type == DEREF)
2568 {
2569 if (!(get_varinfo (lhs.var)->is_special_var))
2570 insert_into_complex (graph, rhs.var, c);
2571 }
2572 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2573 && (lhs.offset != 0 || rhs.offset != 0))
2574 {
2575 insert_into_complex (graph, rhs.var, c);
2576 }
2577 }
2578 }
2579 }
2580
2581
2582 /* Optimize and rewrite complex constraints while performing
2583 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2584 result of perform_variable_substitution. */
2585
static void
rewrite_constraints (constraint_graph_t graph,
		     class scc_info *si)
{
  int i;
  constraint_t c;

  /* At this point no unification may have happened yet, so every
     variable must be its own representative.  */
  if (flag_checking)
    {
      for (unsigned int j = 0; j < graph->size; j++)
	gcc_assert (find (j) == j);
    }

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      /* Map each side through the SCC condensation to get its
	 pointer-equivalence label.  */
      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable (label 0), and if
	 so, ignore the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Collapse each side onto an existing node with the same
	 pointer-equivalence label, if one exists.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2651
2652 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2653 part of an SCC, false otherwise. */
2654
2655 static bool
2656 eliminate_indirect_cycles (unsigned int node)
2657 {
2658 if (graph->indirect_cycles[node] != -1
2659 && !bitmap_empty_p (get_varinfo (node)->solution))
2660 {
2661 unsigned int i;
2662 auto_vec<unsigned> queue;
2663 int queuepos;
2664 unsigned int to = find (graph->indirect_cycles[node]);
2665 bitmap_iterator bi;
2666
2667 /* We can't touch the solution set and call unify_nodes
2668 at the same time, because unify_nodes is going to do
2669 bitmap unions into it. */
2670
2671 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2672 {
2673 if (find (i) == i && i != to)
2674 {
2675 if (unite (to, i))
2676 queue.safe_push (i);
2677 }
2678 }
2679
2680 for (queuepos = 0;
2681 queue.iterate (queuepos, &i);
2682 queuepos++)
2683 {
2684 unify_nodes (graph, to, i, true);
2685 }
2686 return true;
2687 }
2688 return false;
2689 }
2690
2691 /* Solve the constraint graph GRAPH using our worklist solver.
2692 This is based on the PW* family of solvers from the "Efficient Field
2693 Sensitive Pointer Analysis for C" paper.
2694 It works by iterating over all the graph nodes, processing the complex
2695 constraints and propagating the copy constraints, until everything stops
2696 changed. This corresponds to steps 6-8 in the solving list given above. */
2697
static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate until no node's solution changes any more.  */
  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Visit nodes in topological order so that solutions flow
	 forward within a single pass where possible.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
		 is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't have ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* Only bits added since the last visit need work.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Record what we have now propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints.  */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  TO_REMOVE
		     defers clearing of stale (non-representative)
		     successor bits until after the iterator has moved
		     past them.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      bitmap tmp = get_varinfo (to)->solution;
		      bool flag = false;

		      /* If we propagate from ESCAPED use ESCAPED as
			 placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2866
/* Map from trees to variable infos.  Entries are created via
   insert_vi_for_tree/get_vi_for_tree and queried via
   lookup_vi_for_tree.  */
static hash_map<tree, varinfo_t> *vi_for_tree;
2869
2870
2871 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2872
2873 static void
2874 insert_vi_for_tree (tree t, varinfo_t vi)
2875 {
2876 gcc_assert (vi);
2877 gcc_assert (!vi_for_tree->put (t, vi));
2878 }
2879
2880 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2881 exist in the map, return NULL, otherwise, return the varinfo we found. */
2882
2883 static varinfo_t
2884 lookup_vi_for_tree (tree t)
2885 {
2886 varinfo_t *slot = vi_for_tree->get (t);
2887 if (slot == NULL)
2888 return NULL;
2889
2890 return *slot;
2891 }
2892
2893 /* Return a printable name for DECL */
2894
2895 static const char *
2896 alias_get_name (tree decl)
2897 {
2898 const char *res = "NULL";
2899 if (dump_file)
2900 {
2901 char *temp = NULL;
2902 if (TREE_CODE (decl) == SSA_NAME)
2903 {
2904 res = get_name (decl);
2905 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2906 }
2907 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2908 && DECL_ASSEMBLER_NAME_SET_P (decl))
2909 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2910 else if (DECL_P (decl))
2911 {
2912 res = get_name (decl);
2913 if (!res)
2914 temp = xasprintf ("D.%u", DECL_UID (decl));
2915 }
2916
2917 if (temp)
2918 {
2919 res = ggc_strdup (temp);
2920 free (temp);
2921 }
2922 }
2923
2924 return res;
2925 }
2926
2927 /* Find the variable id for tree T in the map.
2928 If T doesn't exist in the map, create an entry for it and return it. */
2929
2930 static varinfo_t
2931 get_vi_for_tree (tree t)
2932 {
2933 varinfo_t *slot = vi_for_tree->get (t);
2934 if (slot == NULL)
2935 {
2936 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2937 return get_varinfo (id);
2938 }
2939
2940 return *slot;
2941 }
2942
2943 /* Get a scalar constraint expression for a new temporary variable. */
2944
2945 static struct constraint_expr
2946 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2947 {
2948 struct constraint_expr tmp;
2949 varinfo_t vi;
2950
2951 vi = new_var_info (NULL_TREE, name, add_id);
2952 vi->offset = 0;
2953 vi->size = -1;
2954 vi->fullsize = -1;
2955 vi->is_full_var = 1;
2956 vi->is_reg_var = 1;
2957
2958 tmp.var = vi->id;
2959 tmp.type = SCALAR;
2960 tmp.offset = 0;
2961
2962 return tmp;
2963 }
2964
2965 /* Get a constraint expression vector from an SSA_VAR_P node.
2966 If address_p is true, the result will be taken its address of. */
2967
static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing.  */
      else if (!ssa_defined_default_def_p (t))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this decl may bind to NULL note that.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
3048
3049 /* Process constraint T, performing various simplifications and then
3050 adding it to our list of overall constraints. */
3051
static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p.  */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp.  The temporary keeps each
	 stored constraint in one of the canonical simple forms.  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp.  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Already in a form the solver handles directly; store it.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
3104
3105
3106 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3107 structure. */
3108
3109 static HOST_WIDE_INT
3110 bitpos_of_field (const tree fdecl)
3111 {
3112 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3113 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3114 return -1;
3115
3116 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3117 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3118 }
3119
3120
3121 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3122 resulting constraint expressions in *RESULTS. */
3123
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.
	     With a negative rhsoffset, the unsigned addition wraps
	     exactly when the result exceeds curr->offset.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* For scalars the offset is carried symbolically in the
	     constraint and applied during solving.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3239
3240
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If address_p is true the result will have its address taken.
   If lhs_p is true then the constraint expression is assumed to be used
   as the lhs.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  poly_int64 bitsize = -1;
  poly_int64 bitmaxsize = -1;
  poly_int64 bitpos;
  bool reverse;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      /* References based on the literal zero pointer resolve to the
	 special INTEGER variable.  */
      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);

  /* We can end up here for component references on a
     VIEW_CONVERT_EXPR <>(&foobar) or things like a
     BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>.  So for
     symbolic constants simply give up.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      constraint_expr result;
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
      results->safe_push (result);
      return;
    }

  /* Avoid creating pointer-offset constraints, so handle MEM_REF
     offsets directly.  Pretend to take the address of the base,
     we'll take care of adding the required subset of sub-fields below.  */
  if (TREE_CODE (t) == MEM_REF
      && !integer_zerop (TREE_OPERAND (t, 0)))
    {
      /* Fold the MEM_REF offset into BITPOS; if that overflows a
	 HOST_WIDE_INT fall back to an unknown extent at offset zero.  */
      poly_offset_int off = mem_ref_offset (t);
      off <<= LOG2_BITS_PER_UNIT;
      off += bitpos;
      poly_int64 off_hwi;
      if (off.to_shwi (&off_hwi))
	bitpos = off_hwi;
      else
	{
	  bitpos = 0;
	  bitmaxsize = -1;
	}
      get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
      do_deref (results);
    }
  else
    get_constraint_for_1 (t, results, true, lhs_p);

  /* Strip off nothing_id.  */
  if (results->length () == 2)
    {
      gcc_assert ((*results)[0].var == nothing_id);
      results->unordered_remove (0);
    }
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
	  && maybe_ne (bitmaxsize, 0))
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	    {
	      if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
					  curr->size, bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (known_eq (bitmaxsize, 0))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable, "
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      HOST_WIDE_INT const_bitpos;
      if (!bitpos.is_constant (&const_bitpos)
	  || const_bitpos == -1
	  || maybe_ne (bitsize, bitmaxsize)
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += const_bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on constants like
	 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i].  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3418
3419
3420 /* Dereference the constraint expression CONS, and return the result.
3421 DEREF (ADDRESSOF) = SCALAR
3422 DEREF (SCALAR) = DEREF
3423 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3424 This is needed so that we can handle dereferencing DEREF constraints. */
3425
3426 static void
3427 do_deref (vec<ce_s> *constraints)
3428 {
3429 struct constraint_expr *c;
3430 unsigned int i = 0;
3431
3432 FOR_EACH_VEC_ELT (*constraints, i, c)
3433 {
3434 if (c->type == SCALAR)
3435 c->type = DEREF;
3436 else if (c->type == ADDRESSOF)
3437 c->type = SCALAR;
3438 else if (c->type == DEREF)
3439 {
3440 struct constraint_expr tmplhs;
3441 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3442 process_constraint (new_constraint (tmplhs, *c));
3443 c->var = tmplhs.var;
3444 }
3445 else
3446 gcc_unreachable ();
3447 }
3448 }
3449
3450 /* Given a tree T, return the constraint expression for taking the
3451 address of it. */
3452
3453 static void
3454 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3455 {
3456 struct constraint_expr *c;
3457 unsigned int i;
3458
3459 get_constraint_for_1 (t, results, true, true);
3460
3461 FOR_EACH_VEC_ELT (*results, i, c)
3462 {
3463 if (c->type == DEREF)
3464 c->type = SCALAR;
3465 else
3466 c->type = ADDRESSOF;
3467 }
3468 }
3469
/* Given a tree T, return the constraint expression for it.
   ADDRESS_P is true when the address of T is being taken; LHS_P is
   true when the expression is used on the left-hand side of an
   assignment.  Results are appended to *RESULTS.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* Append all subfields of the accessed variable that the
		 access of SIZE bits starting at CS may touch.  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A VIEW_CONVERT_EXPR does not change what is pointed to.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Union of the constraints of all initializer elements.  */
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3657
3658 /* Given a gimple tree T, return the constraint expression vector for it. */
3659
3660 static void
3661 get_constraint_for (tree t, vec<ce_s> *results)
3662 {
3663 gcc_assert (results->length () == 0);
3664
3665 get_constraint_for_1 (t, results, false, true);
3666 }
3667
3668 /* Given a gimple tree T, return the constraint expression vector for it
3669 to be used as the rhs of a constraint. */
3670
3671 static void
3672 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3673 {
3674 gcc_assert (results->length () == 0);
3675
3676 get_constraint_for_1 (t, results, false, false);
3677 }
3678
3679
3680 /* Efficiently generates constraints from all entries in *RHSC to all
3681 entries in *LHSC. */
3682
3683 static void
3684 process_all_all_constraints (vec<ce_s> lhsc,
3685 vec<ce_s> rhsc)
3686 {
3687 struct constraint_expr *lhsp, *rhsp;
3688 unsigned i, j;
3689
3690 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3691 {
3692 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3693 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3694 process_constraint (new_constraint (*lhsp, *rhsp));
3695 }
3696 else
3697 {
3698 struct constraint_expr tmp;
3699 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3700 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3701 process_constraint (new_constraint (tmp, *rhsp));
3702 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3703 process_constraint (new_constraint (*lhsp, tmp));
3704 }
3705 }
3706
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the aggregate assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is a dereference (or the lhs is completely unknown)
     we cannot match fields pairwise; fall back to all-to-all copies
     with unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk the lhs and rhs field chains in lock-step, emitting a
	 copy constraint for each pair of overlapping fields.  J indexes
	 the lhs fields, K the rhs fields.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the rhs cursor when the current rhs field ends
	     before the current lhs field does, else advance the lhs.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3781
3782 /* Create constraints ID = { rhsc }. */
3783
3784 static void
3785 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3786 {
3787 struct constraint_expr *c;
3788 struct constraint_expr includes;
3789 unsigned int j;
3790
3791 includes.var = id;
3792 includes.offset = 0;
3793 includes.type = SCALAR;
3794
3795 FOR_EACH_VEC_ELT (rhsc, j, c)
3796 process_constraint (new_constraint (includes, *c));
3797 }
3798
3799 /* Create a constraint ID = OP. */
3800
3801 static void
3802 make_constraint_to (unsigned id, tree op)
3803 {
3804 auto_vec<ce_s> rhsc;
3805 get_constraint_for_rhs (op, &rhsc);
3806 make_constraints_to (id, rhsc);
3807 }
3808
3809 /* Create a constraint ID = &FROM. */
3810
3811 static void
3812 make_constraint_from (varinfo_t vi, int from)
3813 {
3814 struct constraint_expr lhs, rhs;
3815
3816 lhs.var = vi->id;
3817 lhs.offset = 0;
3818 lhs.type = SCALAR;
3819
3820 rhs.var = from;
3821 rhs.offset = 0;
3822 rhs.type = ADDRESSOF;
3823 process_constraint (new_constraint (lhs, rhs));
3824 }
3825
3826 /* Create a constraint ID = FROM. */
3827
3828 static void
3829 make_copy_constraint (varinfo_t vi, int from)
3830 {
3831 struct constraint_expr lhs, rhs;
3832
3833 lhs.var = vi->id;
3834 lhs.offset = 0;
3835 lhs.type = SCALAR;
3836
3837 rhs.var = from;
3838 rhs.offset = 0;
3839 rhs.type = SCALAR;
3840 process_constraint (new_constraint (lhs, rhs));
3841 }
3842
/* Make constraints necessary to make OP escape, i.e. add everything
   OP may point to into the ESCAPED solution.  */

static void
make_escape_constraint (tree op)
{
  make_constraint_to (escaped_id, op);
}
3850
3851 /* Add constraints to that the solution of VI is transitively closed. */
3852
3853 static void
3854 make_transitive_closure_constraints (varinfo_t vi)
3855 {
3856 struct constraint_expr lhs, rhs;
3857
3858 /* VAR = *(VAR + UNKNOWN); */
3859 lhs.type = SCALAR;
3860 lhs.var = vi->id;
3861 lhs.offset = 0;
3862 rhs.type = DEREF;
3863 rhs.var = vi->id;
3864 rhs.offset = UNKNOWN_OFFSET;
3865 process_constraint (new_constraint (lhs, rhs));
3866 }
3867
3868 /* Add constraints to that the solution of VI has all subvariables added. */
3869
3870 static void
3871 make_any_offset_constraints (varinfo_t vi)
3872 {
3873 struct constraint_expr lhs, rhs;
3874
3875 /* VAR = VAR + UNKNOWN; */
3876 lhs.type = SCALAR;
3877 lhs.var = vi->id;
3878 lhs.offset = 0;
3879 rhs.type = SCALAR;
3880 rhs.var = vi->id;
3881 rhs.offset = UNKNOWN_OFFSET;
3882 process_constraint (new_constraint (lhs, rhs));
3883 }
3884
/* Temporary storage for fake var decls; used as the allocation arena
   by build_fake_var_decl below.  */
struct obstack fake_var_decl_obstack;
3887
/* Build a fake VAR_DECL of type TYPE acting as referrer to a DECL_UID.
   The decl is obstack-allocated (not GC-allocated) and zero-initialized,
   with a fresh DECL_UID and a PT_UID of -1.  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  SET_DECL_PT_UID (decl, -1);
  /* Compute size/alignment fields for the fake decl.  */
  layout_decl (decl, 0);
  return decl;
}
3902
3903 /* Create a new artificial heap variable with NAME.
3904 Return the created variable. */
3905
3906 static varinfo_t
3907 make_heapvar (const char *name, bool add_id)
3908 {
3909 varinfo_t vi;
3910 tree heapvar;
3911
3912 heapvar = build_fake_var_decl (ptr_type_node);
3913 DECL_EXTERNAL (heapvar) = 1;
3914
3915 vi = new_var_info (heapvar, name, add_id);
3916 vi->is_heap_var = true;
3917 vi->is_unknown_size_var = true;
3918 vi->offset = 0;
3919 vi->fullsize = ~0;
3920 vi->size = ~0;
3921 vi->is_full_var = true;
3922 insert_vi_for_tree (heapvar, vi);
3923
3924 return vi;
3925 }
3926
3927 /* Create a new artificial heap variable with NAME and make a
3928 constraint from it to LHS. Set flags according to a tag used
3929 for tracking restrict pointers. */
3930
3931 static varinfo_t
3932 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3933 {
3934 varinfo_t vi = make_heapvar (name, add_id);
3935 vi->is_restrict_var = 1;
3936 vi->is_global_var = 1;
3937 vi->may_have_pointers = 1;
3938 make_constraint_from (lhs, vi->id);
3939 return vi;
3940 }
3941
3942 /* Create a new artificial heap variable with NAME and make a
3943 constraint from it to LHS. Set flags according to a tag used
3944 for tracking restrict pointers and make the artificial heap
3945 point to global memory. */
3946
3947 static varinfo_t
3948 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3949 bool add_id)
3950 {
3951 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3952 make_copy_constraint (vi, nonlocal_id);
3953 return vi;
3954 }
3955
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  These constants are the
   offsets of the respective parts within the function varinfo.  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3964
3965 /* Get a constraint for the requested part of a function designator FI
3966 when operating in IPA mode. */
3967
3968 static struct constraint_expr
3969 get_function_part_constraint (varinfo_t fi, unsigned part)
3970 {
3971 struct constraint_expr c;
3972
3973 gcc_assert (in_ipa_mode);
3974
3975 if (fi->id == anything_id)
3976 {
3977 /* ??? We probably should have a ANYFN special variable. */
3978 c.var = anything_id;
3979 c.offset = 0;
3980 c.type = SCALAR;
3981 }
3982 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
3983 {
3984 varinfo_t ai = first_vi_for_offset (fi, part);
3985 if (ai)
3986 c.var = ai->id;
3987 else
3988 c.var = anything_id;
3989 c.offset = 0;
3990 c.type = SCALAR;
3991 }
3992 else
3993 {
3994 c.var = fi->id;
3995 c.offset = part;
3996 c.type = DEREF;
3997 }
3998
3999 return c;
4000 }
4001
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS of statement STMT.  Pushes onto *RESULTS the constraint
   expressions describing what the call may return.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Whether any argument flowed into the call-use solution, in which
     case the call may return pointers into it.  */
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  /* Read-only, non-escaping argument: only the call uses are
	     affected.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Non-escaping but possibly clobbered argument: it reaches
	     both the call-use and call-clobber solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* No applicable flags: the argument escapes fully.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = UNKNOWN_OFFSET;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      auto_vec<ce_s> tmpc;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4102
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  STMT is the call,
   FLAGS its ERF_* return flags, RHSC the rhs constraints computed by
   the caller and FNDECL the called decl if known.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias (malloc-like) return: the lhs points to a fresh
	 heap variable instead of the generic rhs solution.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
4166
/* For non-IPA mode, generate constraints necessary for a call of a
   const function that returns a pointer in the statement STMT.
   Pushes the possible return constraints onto *RESULTS.  */

static void
handle_const_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned int k;
  bool need_uses = false;

  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
  if (gimple_call_chain (stmt))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      need_uses = true;
    }

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
      need_uses = true;
    }

  if (need_uses)
    {
      /* Close the call-use solution and allow it to be returned.  */
      varinfo_t uses = get_call_use_vi (stmt);
      make_any_offset_constraints (uses);
      make_transitive_closure_constraints (uses);
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* May return offsetted arguments.  */
  varinfo_t tem = NULL;
  if (gimple_call_num_args (stmt) != 0)
    {
      tem = new_var_info (NULL_TREE, "callarg", true);
      tem->is_reg_var = true;
    }
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
      auto_vec<ce_s> argc;
      get_constraint_for_rhs (arg, &argc);
      make_constraints_to (tem->id, argc);
    }
  if (tem)
    {
      /* The return value may be any argument at any offset.  */
      ce_s ce;
      ce.type = SCALAR;
      ce.var = tem->id;
      ce.offset = UNKNOWN_OFFSET;
      results->safe_push (ce);
    }

  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  results->safe_push (rhsc);
}
4238
/* For non-IPA mode, generate constraints necessary for a call to a
   pure function in statement STMT.  Pushes the possible return
   constraints onto *RESULTS.  The call-use varinfo is created lazily,
   only when something actually flows into it.  */

static void
handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  varinfo_t uses = NULL;

  /* Memory reached from pointer arguments is call-used.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, arg);
    }

  /* The static chain is used as well.  */
  if (gimple_call_chain (stmt))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
    }

  /* And if we applied NRV the address of the return slot.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
    }

  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
    {
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
4303
4304
4305 /* Return the varinfo for the callee of CALL. */
4306
4307 static varinfo_t
4308 get_fi_for_callee (gcall *call)
4309 {
4310 tree decl, fn = gimple_call_fn (call);
4311
4312 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4313 fn = OBJ_TYPE_REF_EXPR (fn);
4314
4315 /* If we can directly resolve the function being called, do so.
4316 Otherwise, it must be some sort of indirect expression that
4317 we should still be able to handle. */
4318 decl = gimple_call_addr_fndecl (fn);
4319 if (decl)
4320 return get_vi_for_tree (decl);
4321
4322 /* If the function is anything other than a SSA name pointer we have no
4323 clue and should be getting ANYFN (well, ANYTHING for now). */
4324 if (!fn || TREE_CODE (fn) != SSA_NAME)
4325 return get_varinfo (anything_id);
4326
4327 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4328 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4329 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4330 fn = SSA_NAME_VAR (fn);
4331
4332 return get_vi_for_tree (fn);
4333 }
4334
4335 /* Create constraints for assigning call argument ARG to the incoming parameter
4336 INDEX of function FI. */
4337
4338 static void
4339 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4340 {
4341 struct constraint_expr lhs;
4342 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4343
4344 auto_vec<ce_s, 2> rhsc;
4345 get_constraint_for_rhs (arg, &rhsc);
4346
4347 unsigned j;
4348 struct constraint_expr *rhsp;
4349 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4350 process_constraint (new_constraint (lhs, *rhsp));
4351 }
4352
4353 /* Return true if FNDECL may be part of another lto partition. */
4354
4355 static bool
4356 fndecl_maybe_in_other_partition (tree fndecl)
4357 {
4358 cgraph_node *fn_node = cgraph_node::get (fndecl);
4359 if (fn_node == NULL)
4360 return true;
4361
4362 return fn_node->in_other_partition;
4363 }
4364
/* Create constraints for the builtin call T in function FN.  Return true
   if the call was handled, otherwise false so that the caller falls back
   to the generic call handling.  */

static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy takes (src, dest) rather than (dest, src) like the
	     other functions, hence the argument swap.  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The mempcpy/stpcpy family returns DEST advanced by some
		 amount, so constrain the result as DEST plus an unknown
		 offset rather than plain DEST.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	      rhsc.truncate (0);
	    }
	  /* *DEST = *SRC for the copied pointers.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  if (res != NULL_TREE)
	    {
	      /* The result points to the same object as DEST.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset with zero stores no valid pointers when NULL cannot
	     be a valid pointer value; otherwise treat the stored bytes
	     as arbitrary integer values.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens.  */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (ptr, &lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* Alloca storage is never global.  To exempt it from escaped
	     handling make it a non-heap var.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores the allocated pointer through its
	     first argument, so the LHS is *PTRPTR.  */
	  tree ptrptr = gimple_call_arg (t, 0);
	  get_constraint_for (ptrptr, &lhsc);
	  do_deref (&lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result points to exactly what the argument points to.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (t))
	  {
	    /* These return a fresh allocation whose pointed-to memory
	       is initialized from the argument's pointed-to memory.  */
	    handle_lhs_call (t, gimple_call_lhs (t),
			     gimple_call_return_flags (t) | ERF_NOALIAS,
			     vNULL, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (0);
	    rhsc.truncate (0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (gimple_call_lhs (t), &lhsc);
		get_constraint_for (gimple_call_arg (t, 0), &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (t))
	  {
	    tree src = gimple_call_arg (t, 0);
	    get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	    /* The NULL case is modelled by also pointing to nothing.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (nul);
	    get_constraint_for (gimple_call_lhs (t), &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  make_constraint_to (uses->id, gimple_call_arg (t, 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function receives FRAME as its static
		     chain.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      /* The result points to what the trampoline storage points
		 to, i.e. the nested function.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A transactional store is *ADDR = SRC.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A transactional load is DEST = *ADDR.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (fn->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (fn->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      /* Model the outlined-region call as passing the data
		 argument as the first parameter of the outlined
		 function.  */
	      tree arg = gimple_call_arg (t, argpos);

	      varinfo_t fi = get_vi_for_tree (fndecl);
	      find_func_aliases_for_call_arg (fi, 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise... */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4824
/* Create constraints for the call T in function FN.  Builtins get
   special-cased handling; otherwise the call is modelled either via its
   ECF flags (non-IPA, or no usable function info) or by wiring
   arguments, return value and static chain to the callee's
   function-info parts (IPA mode).  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;

  /* Try precise builtin handling first; it returns false to request
     generic handling.  */
  if (fndecl != NULL_TREE
      && fndecl_built_in_p (fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      /* Model the call conservatively through its flags.  */
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t),
			 gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with function info for the callee.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  tree arg = gimple_call_arg (t, j);
	  find_func_aliases_for_call_arg (fi, j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  if (aggr_p)
	    {
	      /* Aggregate results are returned through memory, so
		 dereference the result slot.  */
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (rhs);
	      do_deref (&tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4926
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  FN is the function T belongs to.  Dispatches on
   the kind of statement: PHI nodes, calls, assignments, returns and
   asms each get their own constraint generation.  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
	{
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  rhsc.truncate (0);
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (fn, as_a <gcall *> (t));

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == POINTER_DIFF_EXPR)
	    /* The result is not a pointer (part).  */
	    ;
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if (code == TRUNC_DIV_EXPR
		   || code == CEIL_DIV_EXPR
		   || code == FLOOR_DIV_EXPR
		   || code == ROUND_DIV_EXPR
		   || code == EXACT_DIV_EXPR
		   || code == TRUNC_MOD_EXPR
		   || code == CEIL_MOD_EXPR
		   || code == FLOOR_MOD_EXPR
		   || code == ROUND_MOD_EXPR)
	    /* Division and modulo transfer the pointer from the LHS.  */
	    get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Conversions that do not create a pointer out of a
	       non-pointer, and plain copies, transfer the RHS
	       constraints.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (rhsop);
	}
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (t);
      fi = NULL;
      if (!in_ipa_mode
	  && SSA_VAR_P (gimple_return_retval (return_stmt)))
	{
	  /* We handle simple returns by post-processing the solutions.  */
	  ;
	}
      if (!(fi = get_vi_for_tree (fn->decl)))
	make_escape_constraint (gimple_return_retval (return_stmt));
      else if (in_ipa_mode)
	{
	  /* Assign the return value to the function's result part.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5143
5144
5145 /* Create a constraint adding to the clobber set of FI the memory
5146 pointed to by PTR. */
5147
5148 static void
5149 process_ipa_clobber (varinfo_t fi, tree ptr)
5150 {
5151 vec<ce_s> ptrc = vNULL;
5152 struct constraint_expr *c, lhs;
5153 unsigned i;
5154 get_constraint_for_rhs (ptr, &ptrc);
5155 lhs = get_function_part_constraint (fi, fi_clobbers);
5156 FOR_EACH_VEC_ELT (ptrc, i, c)
5157 process_constraint (new_constraint (lhs, *c));
5158 ptrc.release ();
5159 }
5160
5161 /* Walk statement T setting up clobber and use constraints according to the
5162 references found in T. This function is a main part of the
5163 IPA constraint builder. */
5164
5165 static void
5166 find_func_clobbers (struct function *fn, gimple *origt)
5167 {
5168 gimple *t = origt;
5169 auto_vec<ce_s, 16> lhsc;
5170 auto_vec<ce_s, 16> rhsc;
5171 varinfo_t fi;
5172
5173 /* Add constraints for clobbered/used in IPA mode.
5174 We are not interested in what automatic variables are clobbered
5175 or used as we only use the information in the caller to which
5176 they do not escape. */
5177 gcc_assert (in_ipa_mode);
5178
5179 /* If the stmt refers to memory in any way it better had a VUSE. */
5180 if (gimple_vuse (t) == NULL_TREE)
5181 return;
5182
5183 /* We'd better have function information for the current function. */
5184 fi = lookup_vi_for_tree (fn->decl);
5185 gcc_assert (fi != NULL);
5186
5187 /* Account for stores in assignments and calls. */
5188 if (gimple_vdef (t) != NULL_TREE
5189 && gimple_has_lhs (t))
5190 {
5191 tree lhs = gimple_get_lhs (t);
5192 tree tem = lhs;
5193 while (handled_component_p (tem))
5194 tem = TREE_OPERAND (tem, 0);
5195 if ((DECL_P (tem)
5196 && !auto_var_in_fn_p (tem, fn->decl))
5197 || INDIRECT_REF_P (tem)
5198 || (TREE_CODE (tem) == MEM_REF
5199 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5200 && auto_var_in_fn_p
5201 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5202 {
5203 struct constraint_expr lhsc, *rhsp;
5204 unsigned i;
5205 lhsc = get_function_part_constraint (fi, fi_clobbers);
5206 get_constraint_for_address_of (lhs, &rhsc);
5207 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5208 process_constraint (new_constraint (lhsc, *rhsp));
5209 rhsc.truncate (0);
5210 }
5211 }
5212
5213 /* Account for uses in assigments and returns. */
5214 if (gimple_assign_single_p (t)
5215 || (gimple_code (t) == GIMPLE_RETURN
5216 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5217 {
5218 tree rhs = (gimple_assign_single_p (t)
5219 ? gimple_assign_rhs1 (t)
5220 : gimple_return_retval (as_a <greturn *> (t)));
5221 tree tem = rhs;
5222 while (handled_component_p (tem))
5223 tem = TREE_OPERAND (tem, 0);
5224 if ((DECL_P (tem)
5225 && !auto_var_in_fn_p (tem, fn->decl))
5226 || INDIRECT_REF_P (tem)
5227 || (TREE_CODE (tem) == MEM_REF
5228 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5229 && auto_var_in_fn_p
5230 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5231 {
5232 struct constraint_expr lhs, *rhsp;
5233 unsigned i;
5234 lhs = get_function_part_constraint (fi, fi_uses);
5235 get_constraint_for_address_of (rhs, &rhsc);
5236 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5237 process_constraint (new_constraint (lhs, *rhsp));
5238 rhsc.truncate (0);
5239 }
5240 }
5241
5242 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5243 {
5244 varinfo_t cfi = NULL;
5245 tree decl = gimple_call_fndecl (t);
5246 struct constraint_expr lhs, rhs;
5247 unsigned i, j;
5248
5249 /* For builtins we do not have separate function info. For those
5250 we do not generate escapes for we have to generate clobbers/uses. */
5251 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5252 switch (DECL_FUNCTION_CODE (decl))
5253 {
5254 /* The following functions use and clobber memory pointed to
5255 by their arguments. */
5256 case BUILT_IN_STRCPY:
5257 case BUILT_IN_STRNCPY:
5258 case BUILT_IN_BCOPY:
5259 case BUILT_IN_MEMCPY:
5260 case BUILT_IN_MEMMOVE:
5261 case BUILT_IN_MEMPCPY:
5262 case BUILT_IN_STPCPY:
5263 case BUILT_IN_STPNCPY:
5264 case BUILT_IN_STRCAT:
5265 case BUILT_IN_STRNCAT:
5266 case BUILT_IN_STRCPY_CHK:
5267 case BUILT_IN_STRNCPY_CHK:
5268 case BUILT_IN_MEMCPY_CHK:
5269 case BUILT_IN_MEMMOVE_CHK:
5270 case BUILT_IN_MEMPCPY_CHK:
5271 case BUILT_IN_STPCPY_CHK:
5272 case BUILT_IN_STPNCPY_CHK:
5273 case BUILT_IN_STRCAT_CHK:
5274 case BUILT_IN_STRNCAT_CHK:
5275 {
5276 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5277 == BUILT_IN_BCOPY ? 1 : 0));
5278 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5279 == BUILT_IN_BCOPY ? 0 : 1));
5280 unsigned i;
5281 struct constraint_expr *rhsp, *lhsp;
5282 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5283 lhs = get_function_part_constraint (fi, fi_clobbers);
5284 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5285 process_constraint (new_constraint (lhs, *lhsp));
5286 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5287 lhs = get_function_part_constraint (fi, fi_uses);
5288 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5289 process_constraint (new_constraint (lhs, *rhsp));
5290 return;
5291 }
5292 /* The following function clobbers memory pointed to by
5293 its argument. */
5294 case BUILT_IN_MEMSET:
5295 case BUILT_IN_MEMSET_CHK:
5296 case BUILT_IN_POSIX_MEMALIGN:
5297 {
5298 tree dest = gimple_call_arg (t, 0);
5299 unsigned i;
5300 ce_s *lhsp;
5301 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5302 lhs = get_function_part_constraint (fi, fi_clobbers);
5303 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5304 process_constraint (new_constraint (lhs, *lhsp));
5305 return;
5306 }
5307 /* The following functions clobber their second and third
5308 arguments. */
5309 case BUILT_IN_SINCOS:
5310 case BUILT_IN_SINCOSF:
5311 case BUILT_IN_SINCOSL:
5312 {
5313 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5314 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5315 return;
5316 }
5317 /* The following functions clobber their second argument. */
5318 case BUILT_IN_FREXP:
5319 case BUILT_IN_FREXPF:
5320 case BUILT_IN_FREXPL:
5321 case BUILT_IN_LGAMMA_R:
5322 case BUILT_IN_LGAMMAF_R:
5323 case BUILT_IN_LGAMMAL_R:
5324 case BUILT_IN_GAMMA_R:
5325 case BUILT_IN_GAMMAF_R:
5326 case BUILT_IN_GAMMAL_R:
5327 case BUILT_IN_MODF:
5328 case BUILT_IN_MODFF:
5329 case BUILT_IN_MODFL:
5330 {
5331 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5332 return;
5333 }
5334 /* The following functions clobber their third argument. */
5335 case BUILT_IN_REMQUO:
5336 case BUILT_IN_REMQUOF:
5337 case BUILT_IN_REMQUOL:
5338 {
5339 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5340 return;
5341 }
5342 /* The following functions neither read nor clobber memory. */
5343 case BUILT_IN_ASSUME_ALIGNED:
5344 case BUILT_IN_FREE:
5345 return;
5346 /* Trampolines are of no interest to us. */
5347 case BUILT_IN_INIT_TRAMPOLINE:
5348 case BUILT_IN_ADJUST_TRAMPOLINE:
5349 return;
5350 case BUILT_IN_VA_START:
5351 case BUILT_IN_VA_END:
5352 return;
5353 case BUILT_IN_GOMP_PARALLEL:
5354 case BUILT_IN_GOACC_PARALLEL:
5355 {
5356 unsigned int fnpos, argpos;
5357 unsigned int implicit_use_args[2];
5358 unsigned int num_implicit_use_args = 0;
5359 switch (DECL_FUNCTION_CODE (decl))
5360 {
5361 case BUILT_IN_GOMP_PARALLEL:
5362 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5363 fnpos = 0;
5364 argpos = 1;
5365 break;
5366 case BUILT_IN_GOACC_PARALLEL:
5367 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5368 sizes, kinds, ...). */
5369 fnpos = 1;
5370 argpos = 3;
5371 implicit_use_args[num_implicit_use_args++] = 4;
5372 implicit_use_args[num_implicit_use_args++] = 5;
5373 break;
5374 default:
5375 gcc_unreachable ();
5376 }
5377
5378 tree fnarg = gimple_call_arg (t, fnpos);
5379 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5380 tree fndecl = TREE_OPERAND (fnarg, 0);
5381 if (fndecl_maybe_in_other_partition (fndecl))
5382 /* Fallthru to general call handling. */
5383 break;
5384
5385 varinfo_t cfi = get_vi_for_tree (fndecl);
5386
5387 tree arg = gimple_call_arg (t, argpos);
5388
5389 /* Parameter passed by value is used. */
5390 lhs = get_function_part_constraint (fi, fi_uses);
5391 struct constraint_expr *rhsp;
5392 get_constraint_for (arg, &rhsc);
5393 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5394 process_constraint (new_constraint (lhs, *rhsp));
5395 rhsc.truncate (0);
5396
5397 /* Handle parameters used by the call, but not used in cfi, as
5398 implicitly used by cfi. */
5399 lhs = get_function_part_constraint (cfi, fi_uses);
5400 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5401 {
5402 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5403 get_constraint_for (arg, &rhsc);
5404 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5405 process_constraint (new_constraint (lhs, *rhsp));
5406 rhsc.truncate (0);
5407 }
5408
5409 /* The caller clobbers what the callee does. */
5410 lhs = get_function_part_constraint (fi, fi_clobbers);
5411 rhs = get_function_part_constraint (cfi, fi_clobbers);
5412 process_constraint (new_constraint (lhs, rhs));
5413
5414 /* The caller uses what the callee does. */
5415 lhs = get_function_part_constraint (fi, fi_uses);
5416 rhs = get_function_part_constraint (cfi, fi_uses);
5417 process_constraint (new_constraint (lhs, rhs));
5418
5419 return;
5420 }
5421 /* printf-style functions may have hooks to set pointers to
5422 point to somewhere into the generated string. Leave them
5423 for a later exercise... */
5424 default:
5425 /* Fallthru to general call handling. */;
5426 }
5427
5428 /* Parameters passed by value are used. */
5429 lhs = get_function_part_constraint (fi, fi_uses);
5430 for (i = 0; i < gimple_call_num_args (t); i++)
5431 {
5432 struct constraint_expr *rhsp;
5433 tree arg = gimple_call_arg (t, i);
5434
5435 if (TREE_CODE (arg) == SSA_NAME
5436 || is_gimple_min_invariant (arg))
5437 continue;
5438
5439 get_constraint_for_address_of (arg, &rhsc);
5440 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5441 process_constraint (new_constraint (lhs, *rhsp));
5442 rhsc.truncate (0);
5443 }
5444
5445 /* Build constraints for propagating clobbers/uses along the
5446 callgraph edges. */
5447 cfi = get_fi_for_callee (call_stmt);
5448 if (cfi->id == anything_id)
5449 {
5450 if (gimple_vdef (t))
5451 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5452 anything_id);
5453 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5454 anything_id);
5455 return;
5456 }
5457
5458 /* For callees without function info (that's external functions),
5459 ESCAPED is clobbered and used. */
5460 if (cfi->decl
5461 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5462 && !cfi->is_fn_info)
5463 {
5464 varinfo_t vi;
5465
5466 if (gimple_vdef (t))
5467 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5468 escaped_id);
5469 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5470
5471 /* Also honor the call statement use/clobber info. */
5472 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5473 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5474 vi->id);
5475 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5476 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5477 vi->id);
5478 return;
5479 }
5480
5481 /* Otherwise the caller clobbers and uses what the callee does.
5482 ??? This should use a new complex constraint that filters
5483 local variables of the callee. */
5484 if (gimple_vdef (t))
5485 {
5486 lhs = get_function_part_constraint (fi, fi_clobbers);
5487 rhs = get_function_part_constraint (cfi, fi_clobbers);
5488 process_constraint (new_constraint (lhs, rhs));
5489 }
5490 lhs = get_function_part_constraint (fi, fi_uses);
5491 rhs = get_function_part_constraint (cfi, fi_uses);
5492 process_constraint (new_constraint (lhs, rhs));
5493 }
5494 else if (gimple_code (t) == GIMPLE_ASM)
5495 {
5496 /* ??? Ick. We can do better. */
5497 if (gimple_vdef (t))
5498 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5499 anything_id);
5500 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5501 anything_id);
5502 }
5503 }
5504
5505
5506 /* Find the first varinfo in the same variable as START that overlaps with
5507 OFFSET. Return NULL if we can't find one. */
5508
5509 static varinfo_t
5510 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5511 {
5512 /* If the offset is outside of the variable, bail out. */
5513 if (offset >= start->fullsize)
5514 return NULL;
5515
5516 /* If we cannot reach offset from start, lookup the first field
5517 and start from there. */
5518 if (start->offset > offset)
5519 start = get_varinfo (start->head);
5520
5521 while (start)
5522 {
5523 /* We may not find a variable in the field list with the actual
5524 offset when we have glommed a structure to a variable.
5525 In that case, however, offset should still be within the size
5526 of the variable. */
5527 if (offset >= start->offset
5528 && (offset - start->offset) < start->size)
5529 return start;
5530
5531 start = vi_next (start);
5532 }
5533
5534 return NULL;
5535 }
5536
5537 /* Find the first varinfo in the same variable as START that overlaps with
5538 OFFSET. If there is no such varinfo the varinfo directly preceding
5539 OFFSET is returned. */
5540
5541 static varinfo_t
5542 first_or_preceding_vi_for_offset (varinfo_t start,
5543 unsigned HOST_WIDE_INT offset)
5544 {
5545 /* If we cannot reach offset from start, lookup the first field
5546 and start from there. */
5547 if (start->offset > offset)
5548 start = get_varinfo (start->head);
5549
5550 /* We may not find a variable in the field list with the actual
5551 offset when we have glommed a structure to a variable.
5552 In that case, however, offset should still be within the size
5553 of the variable.
5554 If we got beyond the offset we look for return the field
5555 directly preceding offset which may be the last field. */
5556 while (start->next
5557 && offset >= start->offset
5558 && !((offset - start->offset) < start->size))
5559 start = vi_next (start);
5560
5561 return start;
5562 }
5563
5564
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the size of this field is not known at compile time
     (DECL_SIZE missing or not an unsigned HWI).  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers
     (see field_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True if the field could possibly contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict qualified pointer.  */
  unsigned only_restrict_pointers : 1;

  /* If ONLY_RESTRICT_POINTERS, the pointed-to type of the restrict
     pointer; NULL_TREE otherwise.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5589
5590
5591 /* qsort comparison function for two fieldoff's PA and PB */
5592
5593 static int
5594 fieldoff_compare (const void *pa, const void *pb)
5595 {
5596 const fieldoff_s *foa = (const fieldoff_s *)pa;
5597 const fieldoff_s *fob = (const fieldoff_s *)pb;
5598 unsigned HOST_WIDE_INT foasize, fobsize;
5599
5600 if (foa->offset < fob->offset)
5601 return -1;
5602 else if (foa->offset > fob->offset)
5603 return 1;
5604
5605 foasize = foa->size;
5606 fobsize = fob->size;
5607 if (foasize < fobsize)
5608 return -1;
5609 else if (foasize > fobsize)
5610 return 1;
5611 return 0;
5612 }
5613
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  /* fieldoff_compare orders by offset first, then by size.  */
  fieldstack.qsort (fieldoff_compare);
}
5620
5621 /* Return true if T is a type that can have subvars. */
5622
5623 static inline bool
5624 type_can_have_subvars (const_tree t)
5625 {
5626 /* Aggregates without overlapping fields can have subvars. */
5627 return TREE_CODE (t) == RECORD_TYPE;
5628 }
5629
5630 /* Return true if V is a tree that we can have subvars for.
5631 Normally, this is any aggregate type. Also complex
5632 types which are not gimple registers can have subvars. */
5633
5634 static inline bool
5635 var_can_have_subvars (const_tree v)
5636 {
5637 /* Volatile variables should never have subvars. */
5638 if (TREE_THIS_VOLATILE (v))
5639 return false;
5640
5641 /* Non decls or memory tags can never have subvars. */
5642 if (!DECL_P (v))
5643 return false;
5644
5645 return type_can_have_subvars (TREE_TYPE (v));
5646 }
5647
5648 /* Return true if T is a type that does contain pointers. */
5649
5650 static bool
5651 type_must_have_pointers (tree type)
5652 {
5653 if (POINTER_TYPE_P (type))
5654 return true;
5655
5656 if (TREE_CODE (type) == ARRAY_TYPE)
5657 return type_must_have_pointers (TREE_TYPE (type));
5658
5659 /* A function or method can have pointers as arguments, so track
5660 those separately. */
5661 if (TREE_CODE (type) == FUNCTION_TYPE
5662 || TREE_CODE (type) == METHOD_TYPE)
5663 return true;
5664
5665 return false;
5666 }
5667
/* Return true if the type of the field decl T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5673
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs can be decomposed into fields.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);
	tree field_type = TREE_TYPE (field);

	/* Fields we cannot (or should not) decompose further are
	   pushed as a whole; unions are never decomposed because
	   their members overlap.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (field_type, fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The synthesized entry covers [0, offset + foff) and is
	       conservatively marked as possibly containing pointers.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e
		  = {0, offset + foff, false, false, true, false, NULL_TREE};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (e);
	      }
	  }

	/* We saw at least one FIELD_DECL, so the record was not empty
	   and the caller need not handle it itself.  */
	empty_p = false;
      }

  return !empty_p;
}
5778
5779 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5780 if it is a varargs function. */
5781
5782 static unsigned int
5783 count_num_arguments (tree decl, bool *is_varargs)
5784 {
5785 unsigned int num = 0;
5786 tree t;
5787
5788 /* Capture named arguments for K&R functions. They do not
5789 have a prototype and thus no TYPE_ARG_TYPES. */
5790 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5791 ++num;
5792
5793 /* Check if the function has variadic arguments. */
5794 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5795 if (TREE_VALUE (t) == void_type_node)
5796 break;
5797 if (!t)
5798 *is_varargs = true;
5799
5800 return num;
5801 }
5802
/* Create the function info variable for DECL, using NAME, and return
   the varinfo we've created for the function.  If NONLOCAL_P, create
   initial constraints.  The function info is a chain of sub-variables:
   the function itself, clobbers, uses, optionally the static chain and
   result, and one sub-variable per parameter (see the fi_* offsets).  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* For varargs functions the number of parameters is unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  /* PREV_VI tracks the tail of the sub-variable chain; every
     sub-variable added below must have a strictly larger offset.  */
  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname, false);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      clobbervi->is_reg_var = true;

      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      tempname = xasprintf ("%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname, false);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      usevi->is_reg_var = true;

      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A result passed by reference is initialized from NONLOCAL.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG may run out before NUM_ARGS for K&R-style declarations;
	 fall back to the function decl itself in that case.  */
      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
    }

  return vi;
}
6020
6021
6022 /* Return true if FIELDSTACK contains fields that overlap.
6023 FIELDSTACK is assumed to be sorted by offset. */
6024
6025 static bool
6026 check_for_overlaps (vec<fieldoff_s> fieldstack)
6027 {
6028 fieldoff_s *fo = NULL;
6029 unsigned int i;
6030 HOST_WIDE_INT lastoffset = -1;
6031
6032 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6033 {
6034 if (fo->offset == lastoffset)
6035 return true;
6036 lastoffset = fo->offset;
6037 }
6038 return false;
6039 }
6040
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
   HANDLED_STRUCT_TYPE is used to register struct types reached by following
   restrict pointers.  This is needed to prevent infinite recursion.
   If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
   does not advertise it.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type,
			    bool add_restrict = false)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single
     conservative varinfo.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Reject field-sensitivity if any field has unknown size or a
	 negative offset.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && (TYPE_RESTRICT (decl_type) || add_restrict))
	vi->only_restrict_pointers = 1;
      /* For a restrict qualified pointer parameter, create a fake
	 heap variable it points to (unless the pointed-to type was
	 already handled along this recursion, to avoid cycles).  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained together via
     the next/head links.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      if (dump_file)
	{
	  if (fieldstack.length () != 1)
	    {
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	/* Without dumping, avoid building field names at all.  */
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* As above, handle a restrict qualified pointer field of a
	 parameter by pointing it at a fake heap variable.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
6219
/* Create the varinfo for DECL with NAME, register it in the
   tree-to-varinfo map and return its id.  For global variables also
   generate the initial constraints, including IPA-mode constraints
   derived from the initializer.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, fi_result);
      fi = new_var_info (NULL_TREE, "ifuncres", true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (new_constraint (lhs, rhs));
      insert_vi_for_tree (decl, fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only VAR_DECLs need the global-variable constraints below.  */
  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
6319
6320 /* Print out the points-to solution for VAR to FILE. */
6321
6322 static void
6323 dump_solution_for_var (FILE *file, unsigned int var)
6324 {
6325 varinfo_t vi = get_varinfo (var);
6326 unsigned int i;
6327 bitmap_iterator bi;
6328
6329 /* Dump the solution for unified vars anyway, this avoids difficulties
6330 in scanning dumps in the testsuite. */
6331 fprintf (file, "%s = { ", vi->name);
6332 vi = get_varinfo (find (var));
6333 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6334 fprintf (file, "%s ", get_varinfo (i)->name);
6335 fprintf (file, "}");
6336
6337 /* But note when the variable was unified. */
6338 if (vi->id != var)
6339 fprintf (file, " same as %s", vi->name);
6340
6341 fprintf (file, "\n");
6342 }
6343
/* Print the points-to solution for VAR to stderr.  Convenience
   wrapper around dump_solution_for_var for use from a debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
6351
6352 /* Register the constraints for function parameter related VI. */
6353
6354 static void
6355 make_param_constraints (varinfo_t vi)
6356 {
6357 for (; vi; vi = vi_next (vi))
6358 {
6359 if (vi->only_restrict_pointers)
6360 ;
6361 else if (vi->may_have_pointers)
6362 make_constraint_from (vi, nonlocal_id);
6363
6364 if (vi->is_full_var)
6365 break;
6366 }
6367 }
6368
/* Create varinfo structures for all of the variables in the
   function FN for intraprocedural mode, registering constraints for
   incoming parameters, a by-reference result and the static chain.  */

static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  bitmap handled_struct_type = NULL;
  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      /* Lazily allocate the scratch bitmap used by
	 create_variable_info_for_1 to handle each struct type only
	 once when creating restrict tags.  */
      if (handled_struct_type == NULL)
	handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
	= create_variable_info_for_1 (t, alias_get_name (t), false, true,
				      handled_struct_type, this_parm_in_ctor);
      insert_vi_for_tree (t, p);

      make_param_constraints (p);

      /* Only the first parameter of a C++ constructor is 'this'.  */
      this_parm_in_ctor = false;
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
6419
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to variable bitmap.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS, computed once at insertion time.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6429
/* Shared_bitmap hashtable helpers.  Entries are freed with free ()
   via free_ptr_hash.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};
6438
/* Hash function for a shared_bitmap_info_t.  Returns the hash cached
   when the entry was created, avoiding rehashing the bitmap.  */

inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  return bi->hashcode;
}
6446
/* Equality function for two shared_bitmap_info_t's.  Two entries are
   equal iff their bitmaps have exactly the same bits set.  */

inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
6455
/* Shared_bitmap hashtable, mapping a points-to bitmap to its single
   shared instance.  Allocated in init_alias_vars.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6459
6460 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6461 existing instance if there is one, NULL otherwise. */
6462
6463 static bitmap
6464 shared_bitmap_lookup (bitmap pt_vars)
6465 {
6466 shared_bitmap_info **slot;
6467 struct shared_bitmap_info sbi;
6468
6469 sbi.pt_vars = pt_vars;
6470 sbi.hashcode = bitmap_hash (pt_vars);
6471
6472 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6473 if (!slot)
6474 return NULL;
6475 else
6476 return (*slot)->pt_vars;
6477 }
6478
6479
6480 /* Add a bitmap to the shared bitmap hashtable. */
6481
6482 static void
6483 shared_bitmap_add (bitmap pt_vars)
6484 {
6485 shared_bitmap_info **slot;
6486 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6487
6488 sbi->pt_vars = pt_vars;
6489 sbi->hashcode = bitmap_hash (pt_vars);
6490
6491 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6492 gcc_assert (!*slot);
6493 *slot = sbi;
6494 }
6495
6496
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, updating the flags of the points-to solution PT (escaped,
   restrict, nonlocal, interposable) as a side effect.  FNDECL, when
   non-NULL, is the function the solution belongs to; in IPA mode it
   is used to classify variables local to other functions as
   nonlocal.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* Artificial vars (ESCAPED, NONLOCAL, ...) are translated into
	 PT flags by the caller, not recorded in the bitmap.  */
      if (vi->is_artificial_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6584
6585
/* Compute the points-to solution *PT for the variable VI (ORIG_VI
   before unification) and return it.  Solutions are cached per
   representative variable in FINAL_SOLUTIONS, and points-to bitmaps
   equal to an earlier one are shared via the shared bitmap table.
   FNDECL is passed through to set_uids_in_ptset.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First occurrence of this set - register and keep it.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the existing shared bitmap and discard ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6666
/* Given a pointer variable P, fill in its SSA_NAME_PTR_INFO points-to
   set from the computed solution.  FNDECL is the containing function,
   forwarded to find_what_var_points_to.  */

static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  /* Remember VRP's nonnull result before we overwrite pi->pt below.  */
  bool nonnull = get_ptr_nonnull (p);

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull computed by VRP.  See get_ptr_nonnull
     in gcc/tree-ssanames.c for more information.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
6698
6699
/* Query statistics for points-to solutions.  "no_alias" counts
   queries that disambiguated, "may_alias" those that did not.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6708
6709 void
6710 dump_pta_stats (FILE *s)
6711 {
6712 fprintf (s, "\nPTA query stats:\n");
6713 fprintf (s, " pt_solution_includes: "
6714 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6715 HOST_WIDE_INT_PRINT_DEC" queries\n",
6716 pta_stats.pt_solution_includes_no_alias,
6717 pta_stats.pt_solution_includes_no_alias
6718 + pta_stats.pt_solution_includes_may_alias);
6719 fprintf (s, " pt_solutions_intersect: "
6720 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6721 HOST_WIDE_INT_PRINT_DEC" queries\n",
6722 pta_stats.pt_solutions_intersect_no_alias,
6723 pta_stats.pt_solutions_intersect_no_alias
6724 + pta_stats.pt_solutions_intersect_may_alias);
6725 }
6726
6727
/* Reset the points-to solution *PT to a conservative default
   (point to anything, including NULL).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
  pt->null = true;
}
6738
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether VARS contains
   a nonlocal (global) variable.  Whether the set contains escaped
   variables is derived from the current function's ESCAPED
   solution.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6755
6756 /* Set the points-to solution *PT to point only to the variable VAR. */
6757
6758 void
6759 pt_solution_set_var (struct pt_solution *pt, tree var)
6760 {
6761 memset (pt, 0, sizeof (struct pt_solution));
6762 pt->vars = BITMAP_GGC_ALLOC ();
6763 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6764 pt->vars_contains_nonlocal = is_global_var (var);
6765 pt->vars_contains_escaped
6766 = (cfun->gimple_df->escaped.anything
6767 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6768 }
6769
6770 /* Computes the union of the points-to solutions *DEST and *SRC and
6771 stores the result in *DEST. This changes the points-to bitmap
6772 of *DEST and thus may not be used if that might be shared.
6773 The points-to bitmap of *SRC and *DEST will not be shared after
6774 this function if they were not before. */
6775
6776 static void
6777 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6778 {
6779 dest->anything |= src->anything;
6780 if (dest->anything)
6781 {
6782 pt_solution_reset (dest);
6783 return;
6784 }
6785
6786 dest->nonlocal |= src->nonlocal;
6787 dest->escaped |= src->escaped;
6788 dest->ipa_escaped |= src->ipa_escaped;
6789 dest->null |= src->null;
6790 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6791 dest->vars_contains_escaped |= src->vars_contains_escaped;
6792 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6793 if (!src->vars)
6794 return;
6795
6796 if (!dest->vars)
6797 dest->vars = BITMAP_GGC_ALLOC ();
6798 bitmap_ior_into (dest->vars, src->vars);
6799 }
6800
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED set, check if that is
     empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6826
6827 /* Return true if the points-to solution *PT only point to a single var, and
6828 return the var uid in *UID. */
6829
6830 bool
6831 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6832 {
6833 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6834 || pt->vars == NULL
6835 || !bitmap_single_bit_set_p (pt->vars))
6836 return false;
6837
6838 *uid = bitmap_first_set_bit (pt->vars);
6839 return true;
6840 }
6841
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  /* Likewise for the IPA escaped placeholder.  */
  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  return false;
}
6865
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  Worker for pt_solution_includes, which adds
   statistics bookkeeping.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED set, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6895
6896 bool
6897 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6898 {
6899 bool res = pt_solution_includes_1 (pt, decl);
6900 if (res)
6901 ++pta_stats.pt_solution_includes_may_alias;
6902 else
6903 ++pta_stats.pt_solution_includes_no_alias;
6904 return res;
6905 }
6906
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  Worker for pt_solutions_intersect, which adds
   statistics bookkeeping.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6958
6959 bool
6960 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6961 {
6962 bool res = pt_solutions_intersect_1 (pt1, pt2);
6963 if (res)
6964 ++pta_stats.pt_solutions_intersect_may_alias;
6965 else
6966 ++pta_stats.pt_solutions_intersect_no_alias;
6967 return res;
6968 }
6969
6970
6971 /* Dump points-to information to OUTFILE. */
6972
6973 static void
6974 dump_sa_points_to_info (FILE *outfile)
6975 {
6976 unsigned int i;
6977
6978 fprintf (outfile, "\nPoints-to sets\n\n");
6979
6980 if (dump_flags & TDF_STATS)
6981 {
6982 fprintf (outfile, "Stats:\n");
6983 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6984 fprintf (outfile, "Non-pointer vars: %d\n",
6985 stats.nonpointer_vars);
6986 fprintf (outfile, "Statically unified vars: %d\n",
6987 stats.unified_vars_static);
6988 fprintf (outfile, "Dynamically unified vars: %d\n",
6989 stats.unified_vars_dynamic);
6990 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6991 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6992 fprintf (outfile, "Number of implicit edges: %d\n",
6993 stats.num_implicit_edges);
6994 }
6995
6996 for (i = 1; i < varmap.length (); i++)
6997 {
6998 varinfo_t vi = get_varinfo (i);
6999 if (!vi->may_have_pointers)
7000 continue;
7001 dump_solution_for_var (outfile, i);
7002 }
7003 }
7004
7005
/* Debug points-to information to stderr.  Convenience wrapper around
   dump_sa_points_to_info for use from a debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
7013
7014
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, and the base
   constraints between them.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
7176
/* Initialize things necessary to perform PTA: obstacks, the
   constraint and variable maps, the shared bitmap table and the
   always-existing base variables.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  /* 511 is merely the initial table size.  */
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7202
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  Used once the predecessor-based offline phase
   is done to shrink the graph for the main solver.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
7238
/* Solve the constraint set: sort the variables, build and simplify
   the constraint graph, then run the fixed-point solver.  */

static void
solve_constraints (void)
{
  class scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special vars (up to integer_id) keep their positions.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with non-register vars (as possibly address-taken), followed
     by register vars as conservative set of vars never appearing in
     the points-to solution bitmaps.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[i]->is_reg_var)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[i]->is_reg_var)
      map[i] = j++;
  /* Shuffle varmap according to map.  Cycle-chasing swaps place each
     element at its mapped position; next/head links are remapped to
     the new ids as well.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }
}
7341
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.

   This is the non-IPA (intraprocedural) driver: it builds constraints
   from every PHI and statement in CFUN, solves them, post-processes
   escapes through function returns, and finally materializes the
   pt_solution data consumed by the alias oracle: the ESCAPED set,
   per-SSA-name points-to sets and per-call use/clobber sets.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  /* Virtual PHIs carry memory SSA state only and generate no
	     points-to constraints.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns: everything a
     returned pointer may point to (transitively, restricted to globals
     and heap vars) is added to the ESCAPED solution.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
      {
	tree val = gimple_return_retval (ret);
	/* ??? Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;
	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (find (escaped_id));
	varinfo_t vi = lookup_vi_for_tree (val);
	bitmap delta = BITMAP_ALLOC (&pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	/* Seed DELTA with global/heap vars in the solution of each
	   field/part of VAL that are not already in ESCAPED.  */
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (find (vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure: iterate, adding what the
	   newly escaped variables point to, until DELTA is empty.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
	while (!bitmap_empty_p (delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (delta, new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  /* Call-use set: empty for const calls, the computed solution
	     for calls with a use varinfo, otherwise ESCAPED.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Call-clobber set: same scheme, but pure/novops calls also
	     clobber nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
7533
7534
/* Delete created points-to sets.  Releases all memory owned by the
   constraint solver: hash tables, the constraint vector, the constraint
   graph and its arrays, the varinfo map and pools, and the obstacks
   backing fake decls and cached final solutions.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Release the per-node complex constraint vectors before freeing
     the array that holds them.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
7573
/* Callback data for visit_loadstore, passed via
   walk_stmt_load_store_ops.  */

struct vls_data
{
  /* The dependence clique to assign to "other" accesses.  */
  unsigned short clique;
  /* Whether a restrict variable is in the ESCAPED solution, in which
     case accesses whose pointer may point to ESCAPED are skipped.  */
  bool escaped_p;
  /* The set of restrict variables (including their subvariables) that
     received clique/base dependence info.  */
  bitmap rvars;
};
7580
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  BASE is the innermost reference base, REF the full
   reference tree.  Always returns false so the walk continues.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.
	     Without a points-to solution we cannot tell, so skip.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip handled components to reach the decl and replace it
	 in place with a &decl MEM_REF carrying the clique.  */
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
7645
/* Callback data for maybe_set_dependence_info.  */

struct msdi_data {
  /* The SSA pointer whose dereferences we are annotating.  */
  tree ptr;
  /* In/out: the clique number shared across the current function.  */
  unsigned short *clique;
  /* In/out: the last restrict-uid handed out.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR is known to point to.  */
  varinfo_t restrict_var;
};
7652
7653 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7654 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7655 Return whether dependence info was assigned to BASE. */
7656
7657 static bool
7658 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7659 {
7660 tree ptr = ((msdi_data *)data)->ptr;
7661 unsigned short &clique = *((msdi_data *)data)->clique;
7662 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7663 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7664 if ((TREE_CODE (base) == MEM_REF
7665 || TREE_CODE (base) == TARGET_MEM_REF)
7666 && TREE_OPERAND (base, 0) == ptr)
7667 {
7668 /* Do not overwrite existing cliques. This avoids overwriting dependence
7669 info inlined from a function with restrict parameters inlined
7670 into a function with restrict parameters. This usually means we
7671 prefer to be precise in innermost loops. */
7672 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7673 {
7674 if (clique == 0)
7675 {
7676 if (cfun->last_clique == 0)
7677 cfun->last_clique = 1;
7678 clique = 1;
7679 }
7680 if (restrict_var->ruid == 0)
7681 restrict_var->ruid = ++last_ruid;
7682 MR_DEPENDENCE_CLIQUE (base) = clique;
7683 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7684 return true;
7685 }
7686 }
7687 return false;
7688 }
7689
7690 /* Clear dependence info for the clique DATA. */
7691
7692 static bool
7693 clear_dependence_clique (gimple *, tree base, tree, void *data)
7694 {
7695 unsigned short clique = (uintptr_t)data;
7696 if ((TREE_CODE (base) == MEM_REF
7697 || TREE_CODE (base) == TARGET_MEM_REF)
7698 && MR_DEPENDENCE_CLIQUE (base) == clique)
7699 {
7700 MR_DEPENDENCE_CLIQUE (base) = 0;
7701 MR_DEPENDENCE_BASE (base) = 0;
7702 }
7703
7704 return false;
7705 }
7706
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.

   For every SSA pointer that provably points to exactly one restrict
   variable (or NULL), annotate its dereferences with a clique/ruid
   pair via maybe_set_dependence_info; afterwards mark all remaining
   accesses with { clique, 0 } via visit_loadstore so they are
   disambiguated against the restrict accesses.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      /* For default-def parameters/results the points-to info lives on
	 the decl rather than the SSA name.  */
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* Scan the solution; RESTRICT_VAR survives only if every entry is
	 the same restrict variable (or NULL/nothing).  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
7830
7831 /* Compute points-to information for every SSA_NAME pointer in the
7832 current function and compute the transitive closure of escaped
7833 variables to re-initialize the call-clobber states of local variables. */
7834
7835 unsigned int
7836 compute_may_aliases (void)
7837 {
7838 if (cfun->gimple_df->ipa_pta)
7839 {
7840 if (dump_file)
7841 {
7842 fprintf (dump_file, "\nNot re-computing points-to information "
7843 "because IPA points-to information is available.\n\n");
7844
7845 /* But still dump what we have remaining it. */
7846 dump_alias_info (dump_file);
7847 }
7848
7849 return 0;
7850 }
7851
7852 /* For each pointer P_i, determine the sets of variables that P_i may
7853 point-to. Compute the reachability set of escaped and call-used
7854 variables. */
7855 compute_points_to_sets ();
7856
7857 /* Debugging dumps. */
7858 if (dump_file)
7859 dump_alias_info (dump_file);
7860
7861 /* Compute restrict-based memory disambiguations. */
7862 compute_dependence_clique ();
7863
7864 /* Deallocate memory used by aliasing data structures and the internal
7865 points-to solution. */
7866 delete_points_to_sets ();
7867
7868 gcc_assert (!need_ssa_update_p (cfun));
7869
7870 return 0;
7871 }
7872
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute method; all
   work happens through the finish-time TODO flag.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when tree points-to analysis is enabled.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace

/* Factory for the "alias" pass, called from the pass manager.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
7910
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Early variant of the "alias" pass above; the
   pass itself has no execute method.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when tree points-to analysis is enabled.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace

/* Factory for the "ealias" pass, called from the pass manager.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
7948
7949
/* IPA PTA solutions for ESCAPED.  Positional initializer over
   struct pt_solution; NOTE(review): the leading 'true' appears to set
   the first flag member (presumably 'anything', making this maximally
   conservative until ipa_pta_execute overwrites it) — confirm against
   the pt_solution declaration.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };
7954
7955 /* Associate node with varinfo DATA. Worker for
7956 cgraph_for_symbol_thunks_and_aliases. */
7957 static bool
7958 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7959 {
7960 if ((node->alias
7961 || (node->thunk.thunk_p
7962 && ! node->global.inlined_to))
7963 && node->analyzed
7964 && !node->ifunc_resolver)
7965 insert_vi_for_tree (node->decl, (varinfo_t)data);
7966 return false;
7967 }
7968
/* Dump varinfo VI to FILE: id, name, flag words, linkage fields
   (ruid/next/head), extents and the (old)solution bitmaps.  Does
   nothing for a NULL VI.  */

static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  fprintf (file, "%u: %s\n", vi->id, vi->name);

  /* SEP is a constant single-space separator printed before each flag.  */
  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  /* HEAD equal to the own id means VI leads its field chain; only
     print it when it differs.  */
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  /* All-ones size/fullsize act as "unknown" and are suppressed.  */
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  /* Only print the old solution when it differs from the current one.  */
  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
8039
/* Dump varinfo VI to stderr.  Debugger convenience wrapper around
   dump_varinfo.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8047
8048 /* Dump varmap to FILE. */
8049
8050 static void
8051 dump_varmap (FILE *file)
8052 {
8053 if (varmap.length () == 0)
8054 return;
8055
8056 fprintf (file, "variables:\n");
8057
8058 for (unsigned int i = 0; i < varmap.length (); ++i)
8059 {
8060 varinfo_t vi = get_varinfo (i);
8061 dump_varinfo (file, vi);
8062 }
8063
8064 fprintf (file, "\n");
8065 }
8066
/* Dump varmap to stderr.  Debugger convenience wrapper around
   dump_varmap.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8074
8075 /* Compute whether node is refered to non-locally. Worker for
8076 cgraph_for_symbol_thunks_and_aliases. */
8077 static bool
8078 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8079 {
8080 bool *nonlocal_p = (bool *)data;
8081 *nonlocal_p |= (node->used_from_other_partition
8082 || node->externally_visible
8083 || node->force_output
8084 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8085 return false;
8086 }
8087
8088 /* Same for varpool nodes. */
8089 static bool
8090 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8091 {
8092 bool *nonlocal_p = (bool *)data;
8093 *nonlocal_p |= (node->used_from_other_partition
8094 || node->externally_visible
8095 || node->force_output);
8096 return false;
8097 }
8098
8099 /* Execute the driver for IPA PTA. */
8100 static unsigned int
8101 ipa_pta_execute (void)
8102 {
8103 struct cgraph_node *node;
8104 varpool_node *var;
8105 unsigned int from = 0;
8106
8107 in_ipa_mode = 1;
8108
8109 init_alias_vars ();
8110
8111 if (dump_file && (dump_flags & TDF_DETAILS))
8112 {
8113 symtab->dump (dump_file);
8114 fprintf (dump_file, "\n");
8115 }
8116
8117 if (dump_file)
8118 {
8119 fprintf (dump_file, "Generating generic constraints\n\n");
8120 dump_constraints (dump_file, from);
8121 fprintf (dump_file, "\n");
8122 from = constraints.length ();
8123 }
8124
8125 /* Build the constraints. */
8126 FOR_EACH_DEFINED_FUNCTION (node)
8127 {
8128 varinfo_t vi;
8129 /* Nodes without a body are not interesting. Especially do not
8130 visit clones at this point for now - we get duplicate decls
8131 there for inline clones at least. */
8132 if (!node->has_gimple_body_p () || node->global.inlined_to)
8133 continue;
8134 node->get_body ();
8135
8136 gcc_assert (!node->clone_of);
8137
8138 /* For externally visible or attribute used annotated functions use
8139 local constraints for their arguments.
8140 For local functions we see all callers and thus do not need initial
8141 constraints for parameters. */
8142 bool nonlocal_p = (node->used_from_other_partition
8143 || node->externally_visible
8144 || node->force_output
8145 || lookup_attribute ("noipa",
8146 DECL_ATTRIBUTES (node->decl)));
8147 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8148 &nonlocal_p, true);
8149
8150 vi = create_function_info_for (node->decl,
8151 alias_get_name (node->decl), false,
8152 nonlocal_p);
8153 if (dump_file
8154 && from != constraints.length ())
8155 {
8156 fprintf (dump_file,
8157 "Generating intial constraints for %s", node->name ());
8158 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8159 fprintf (dump_file, " (%s)",
8160 IDENTIFIER_POINTER
8161 (DECL_ASSEMBLER_NAME (node->decl)));
8162 fprintf (dump_file, "\n\n");
8163 dump_constraints (dump_file, from);
8164 fprintf (dump_file, "\n");
8165
8166 from = constraints.length ();
8167 }
8168
8169 node->call_for_symbol_thunks_and_aliases
8170 (associate_varinfo_to_alias, vi, true);
8171 }
8172
8173 /* Create constraints for global variables and their initializers. */
8174 FOR_EACH_VARIABLE (var)
8175 {
8176 if (var->alias && var->analyzed)
8177 continue;
8178
8179 varinfo_t vi = get_vi_for_tree (var->decl);
8180
8181 /* For the purpose of IPA PTA unit-local globals are not
8182 escape points. */
8183 bool nonlocal_p = (var->used_from_other_partition
8184 || var->externally_visible
8185 || var->force_output);
8186 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8187 &nonlocal_p, true);
8188 if (nonlocal_p)
8189 vi->is_ipa_escape_point = true;
8190 }
8191
8192 if (dump_file
8193 && from != constraints.length ())
8194 {
8195 fprintf (dump_file,
8196 "Generating constraints for global initializers\n\n");
8197 dump_constraints (dump_file, from);
8198 fprintf (dump_file, "\n");
8199 from = constraints.length ();
8200 }
8201
8202 FOR_EACH_DEFINED_FUNCTION (node)
8203 {
8204 struct function *func;
8205 basic_block bb;
8206
8207 /* Nodes without a body are not interesting. */
8208 if (!node->has_gimple_body_p () || node->clone_of)
8209 continue;
8210
8211 if (dump_file)
8212 {
8213 fprintf (dump_file,
8214 "Generating constraints for %s", node->name ());
8215 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8216 fprintf (dump_file, " (%s)",
8217 IDENTIFIER_POINTER
8218 (DECL_ASSEMBLER_NAME (node->decl)));
8219 fprintf (dump_file, "\n");
8220 }
8221
8222 func = DECL_STRUCT_FUNCTION (node->decl);
8223 gcc_assert (cfun == NULL);
8224
8225 /* Build constriants for the function body. */
8226 FOR_EACH_BB_FN (bb, func)
8227 {
8228 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8229 gsi_next (&gsi))
8230 {
8231 gphi *phi = gsi.phi ();
8232
8233 if (! virtual_operand_p (gimple_phi_result (phi)))
8234 find_func_aliases (func, phi);
8235 }
8236
8237 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8238 gsi_next (&gsi))
8239 {
8240 gimple *stmt = gsi_stmt (gsi);
8241
8242 find_func_aliases (func, stmt);
8243 find_func_clobbers (func, stmt);
8244 }
8245 }
8246
8247 if (dump_file)
8248 {
8249 fprintf (dump_file, "\n");
8250 dump_constraints (dump_file, from);
8251 fprintf (dump_file, "\n");
8252 from = constraints.length ();
8253 }
8254 }
8255
8256 /* From the constraints compute the points-to sets. */
8257 solve_constraints ();
8258
8259 if (dump_file)
8260 dump_sa_points_to_info (dump_file);
8261
8262 /* Now post-process solutions to handle locals from different
8263 runtime instantiations coming in through recursive invocations. */
8264 unsigned shadow_var_cnt = 0;
8265 for (unsigned i = 1; i < varmap.length (); ++i)
8266 {
8267 varinfo_t fi = get_varinfo (i);
8268 if (fi->is_fn_info
8269 && fi->decl)
8270 /* Automatic variables pointed to by their containing functions
8271 parameters need this treatment. */
8272 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8273 ai; ai = vi_next (ai))
8274 {
8275 varinfo_t vi = get_varinfo (find (ai->id));
8276 bitmap_iterator bi;
8277 unsigned j;
8278 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8279 {
8280 varinfo_t pt = get_varinfo (j);
8281 if (pt->shadow_var_uid == 0
8282 && pt->decl
8283 && auto_var_in_fn_p (pt->decl, fi->decl))
8284 {
8285 pt->shadow_var_uid = allocate_decl_uid ();
8286 shadow_var_cnt++;
8287 }
8288 }
8289 }
8290 /* As well as global variables which are another way of passing
8291 arguments to recursive invocations. */
8292 else if (fi->is_global_var)
8293 {
8294 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8295 {
8296 varinfo_t vi = get_varinfo (find (ai->id));
8297 bitmap_iterator bi;
8298 unsigned j;
8299 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8300 {
8301 varinfo_t pt = get_varinfo (j);
8302 if (pt->shadow_var_uid == 0
8303 && pt->decl
8304 && auto_var_p (pt->decl))
8305 {
8306 pt->shadow_var_uid = allocate_decl_uid ();
8307 shadow_var_cnt++;
8308 }
8309 }
8310 }
8311 }
8312 }
8313 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8314 fprintf (dump_file, "Allocated %u shadow variables for locals "
8315 "maybe leaking into recursive invocations of their containing "
8316 "functions\n", shadow_var_cnt);
8317
8318 /* Compute the global points-to sets for ESCAPED.
8319 ??? Note that the computed escape set is not correct
8320 for the whole unit as we fail to consider graph edges to
8321 externally visible functions. */
8322 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8323
8324 /* Make sure the ESCAPED solution (which is used as placeholder in
8325 other solutions) does not reference itself. This simplifies
8326 points-to solution queries. */
8327 ipa_escaped_pt.ipa_escaped = 0;
8328
8329 /* Assign the points-to sets to the SSA names in the unit. */
8330 FOR_EACH_DEFINED_FUNCTION (node)
8331 {
8332 tree ptr;
8333 struct function *fn;
8334 unsigned i;
8335 basic_block bb;
8336
8337 /* Nodes without a body are not interesting. */
8338 if (!node->has_gimple_body_p () || node->clone_of)
8339 continue;
8340
8341 fn = DECL_STRUCT_FUNCTION (node->decl);
8342
8343 /* Compute the points-to sets for pointer SSA_NAMEs. */
8344 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8345 {
8346 if (ptr
8347 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8348 find_what_p_points_to (node->decl, ptr);
8349 }
8350
8351 /* Compute the call-use and call-clobber sets for indirect calls
8352 and calls to external functions. */
8353 FOR_EACH_BB_FN (bb, fn)
8354 {
8355 gimple_stmt_iterator gsi;
8356
8357 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8358 {
8359 gcall *stmt;
8360 struct pt_solution *pt;
8361 varinfo_t vi, fi;
8362 tree decl;
8363
8364 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8365 if (!stmt)
8366 continue;
8367
8368 /* Handle direct calls to functions with body. */
8369 decl = gimple_call_fndecl (stmt);
8370
8371 {
8372 tree called_decl = NULL_TREE;
8373 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8374 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8375 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8376 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8377
8378 if (called_decl != NULL_TREE
8379 && !fndecl_maybe_in_other_partition (called_decl))
8380 decl = called_decl;
8381 }
8382
8383 if (decl
8384 && (fi = lookup_vi_for_tree (decl))
8385 && fi->is_fn_info)
8386 {
8387 *gimple_call_clobber_set (stmt)
8388 = find_what_var_points_to
8389 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8390 *gimple_call_use_set (stmt)
8391 = find_what_var_points_to
8392 (node->decl, first_vi_for_offset (fi, fi_uses));
8393 }
8394 /* Handle direct calls to external functions. */
8395 else if (decl && (!fi || fi->decl))
8396 {
8397 pt = gimple_call_use_set (stmt);
8398 if (gimple_call_flags (stmt) & ECF_CONST)
8399 memset (pt, 0, sizeof (struct pt_solution));
8400 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8401 {
8402 *pt = find_what_var_points_to (node->decl, vi);
8403 /* Escaped (and thus nonlocal) variables are always
8404 implicitly used by calls. */
8405 /* ??? ESCAPED can be empty even though NONLOCAL
8406 always escaped. */
8407 pt->nonlocal = 1;
8408 pt->ipa_escaped = 1;
8409 }
8410 else
8411 {
8412 /* If there is nothing special about this call then
8413 we have made everything that is used also escape. */
8414 *pt = ipa_escaped_pt;
8415 pt->nonlocal = 1;
8416 }
8417
8418 pt = gimple_call_clobber_set (stmt);
8419 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8420 memset (pt, 0, sizeof (struct pt_solution));
8421 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8422 {
8423 *pt = find_what_var_points_to (node->decl, vi);
8424 /* Escaped (and thus nonlocal) variables are always
8425 implicitly clobbered by calls. */
8426 /* ??? ESCAPED can be empty even though NONLOCAL
8427 always escaped. */
8428 pt->nonlocal = 1;
8429 pt->ipa_escaped = 1;
8430 }
8431 else
8432 {
8433 /* If there is nothing special about this call then
8434 we have made everything that is used also escape. */
8435 *pt = ipa_escaped_pt;
8436 pt->nonlocal = 1;
8437 }
8438 }
8439 /* Handle indirect calls. */
8440 else if ((fi = get_fi_for_callee (stmt)))
8441 {
8442 /* We need to accumulate all clobbers/uses of all possible
8443 callees. */
8444 fi = get_varinfo (find (fi->id));
8445 /* If we cannot constrain the set of functions we'll end up
8446 calling we end up using/clobbering everything. */
8447 if (bitmap_bit_p (fi->solution, anything_id)
8448 || bitmap_bit_p (fi->solution, nonlocal_id)
8449 || bitmap_bit_p (fi->solution, escaped_id))
8450 {
8451 pt_solution_reset (gimple_call_clobber_set (stmt));
8452 pt_solution_reset (gimple_call_use_set (stmt));
8453 }
8454 else
8455 {
8456 bitmap_iterator bi;
8457 unsigned i;
8458 struct pt_solution *uses, *clobbers;
8459
8460 uses = gimple_call_use_set (stmt);
8461 clobbers = gimple_call_clobber_set (stmt);
8462 memset (uses, 0, sizeof (struct pt_solution));
8463 memset (clobbers, 0, sizeof (struct pt_solution));
8464 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8465 {
8466 struct pt_solution sol;
8467
8468 vi = get_varinfo (i);
8469 if (!vi->is_fn_info)
8470 {
8471 /* ??? We could be more precise here? */
8472 uses->nonlocal = 1;
8473 uses->ipa_escaped = 1;
8474 clobbers->nonlocal = 1;
8475 clobbers->ipa_escaped = 1;
8476 continue;
8477 }
8478
8479 if (!uses->anything)
8480 {
8481 sol = find_what_var_points_to
8482 (node->decl,
8483 first_vi_for_offset (vi, fi_uses));
8484 pt_solution_ior_into (uses, &sol);
8485 }
8486 if (!clobbers->anything)
8487 {
8488 sol = find_what_var_points_to
8489 (node->decl,
8490 first_vi_for_offset (vi, fi_clobbers));
8491 pt_solution_ior_into (clobbers, &sol);
8492 }
8493 }
8494 }
8495 }
8496 else
8497 gcc_unreachable ();
8498 }
8499 }
8500
8501 fn->gimple_df->ipa_pta = true;
8502
8503 /* We have to re-set the final-solution cache after each function
8504 because what is a "global" is dependent on function context. */
8505 final_solutions->empty ();
8506 obstack_free (&final_solutions_obstack, NULL);
8507 gcc_obstack_init (&final_solutions_obstack);
8508 }
8509
8510 delete_points_to_sets ();
8511
8512 in_ipa_mode = 0;
8513
8514 return 0;
8515 }
8516
8517 namespace {
8518
/* Pass descriptor for the interprocedural points-to analysis (IPA PTA)
   pass.  A simple IPA pass: it runs once over the whole program rather
   than per function.  Consumed by class pass_ipa_pta below.  */
const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
8531
8532 class pass_ipa_pta : public simple_ipa_opt_pass
8533 {
8534 public:
8535 pass_ipa_pta (gcc::context *ctxt)
8536 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8537 {}
8538
8539 /* opt_pass methods: */
8540 virtual bool gate (function *)
8541 {
8542 return (optimize
8543 && flag_ipa_pta
8544 /* Don't bother doing anything if the program has errors. */
8545 && !seen_error ());
8546 }
8547
8548 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8549
8550 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8551
8552 }; // class pass_ipa_pta
8553
8554 } // anon namespace
8555
8556 simple_ipa_opt_pass *
8557 make_pass_ipa_pta (gcc::context *ctxt)
8558 {
8559 return new pass_ipa_pta (ctxt);
8560 }