1 /* Tree based points-to analysis
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
42 #include "varasm.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "tree-ssa.h"
46 #include "tree-cfg.h"
47
48 /* The idea behind this analyzer is to generate set constraints from the
49 program, then solve the resulting constraints in order to generate the
50 points-to sets.
51
52 Set constraints are a way of modeling program analysis problems that
53    involve sets.  They consist of an inclusion constraint language,
54    describing the variables (each variable is a set) and the operations
55    performed on the variables, and a set of rules that derive facts
56 from these operations. To solve a system of set constraints, you derive
57 all possible facts under the rules, which gives you the correct sets
58 as a consequence.
59
60    See "Efficient Field-sensitive pointer analysis for C" by David
61    J. Pearce, Paul H. J. Kelly and Chris Hankin, at
62 http://citeseer.ist.psu.edu/pearce04efficient.html
63
64 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
65    of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
66 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67
68 There are three types of real constraint expressions, DEREF,
69 ADDRESSOF, and SCALAR. Each constraint expression consists
70 of a constraint type, a variable, and an offset.
71
72 SCALAR is a constraint expression type used to represent x, whether
73 it appears on the LHS or the RHS of a statement.
74 DEREF is a constraint expression type used to represent *x, whether
75 it appears on the LHS or the RHS of a statement.
76 ADDRESSOF is a constraint expression used to represent &x, whether
77 it appears on the LHS or the RHS of a statement.
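
   For example (an illustrative sketch; p, q and a are arbitrary
   names), the following statements give rise to these constraint
   expressions:

     p = &a;    ->   p = &a     (SCALAR = ADDRESSOF)
     p = q;     ->   p = q      (SCALAR = SCALAR)
     p = *q;    ->   p = *q     (SCALAR = DEREF)
     *p = q;    ->   *p = q     (DEREF = SCALAR)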
78
79 Each pointer variable in the program is assigned an integer id, and
80 each field of a structure variable is assigned an integer id as well.
81
82 Structure variables are linked to their list of fields through a "next
83 field" in each variable that points to the next field in offset
84 order.
85 Each variable for a structure field has
86
87 1. "size", that tells the size in bits of that field.
88    2. "fullsize", that tells the size in bits of the entire structure.
89 3. "offset", that tells the offset in bits from the beginning of the
90 structure to this field.
91
92 Thus,
93 struct f
94 {
95 int a;
96 int b;
97 } foo;
98 int *bar;
99
100 looks like
101
102 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
103 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
104 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
105
106
107 In order to solve the system of set constraints, the following is
108 done:
109
110 1. Each constraint variable x has a solution set associated with it,
111 Sol(x).
112
113 2. Constraints are separated into direct, copy, and complex.
114 Direct constraints are ADDRESSOF constraints that require no extra
115   processing, such as P = &Q.
116 Copy constraints are those of the form P = Q.
117 Complex constraints are all the constraints involving dereferences
118 and offsets (including offsetted copies).
119
120 3. All direct constraints of the form P = &Q are processed, such
121   that Q is added to Sol(P).
122
123 4. All complex constraints for a given constraint variable are stored in a
124 linked list attached to that variable's node.
125
126 5. A directed graph is built out of the copy constraints. Each
127 constraint variable is a node in the graph, and an edge from
128   Q to P is added for each copy constraint of the form P = Q.
129
130 6. The graph is then walked, and solution sets are
131 propagated along the copy edges, such that an edge from Q to P
132 causes Sol(P) <- Sol(P) union Sol(Q).
133
134 7. As we visit each node, all complex constraints associated with
135 that node are processed by adding appropriate copy edges to the graph, or the
136 appropriate variables to the solution set.
137
138 8. The process of walking the graph is iterated until no solution
139 sets change.
140
141   Prior to walking the graph in steps 6 and 7, we perform static
142   cycle elimination on the constraint graph, as well as off-line
143   variable substitution.  A small worked example of solving follows.
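
   As a small worked example (with hypothetical variables), consider

     p = &a;
     q = p;
     r = &b;
     *q = r;

   Step 3 processes the direct constraints, giving Sol(p) = {a} and
   Sol(r) = {b}.  Step 5 adds a copy edge from p to q for q = p, so
   propagation in step 6 yields Sol(q) = {a}.  The complex constraint
   *q = r is processed in step 7: for each member of Sol(q), here a,
   a copy edge from r is added, making Sol(a) = {b}.  The next
   iteration changes no solution set, so the solver stops.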
144
145 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
146 on and turned into anything), but isn't. You can just see what offset
147 inside the pointed-to struct it's going to access.
148
149 TODO: Constant bounded arrays can be handled as if they were structs of the
150 same number of elements.
151
152 TODO: Modeling heap and incoming pointers becomes much better if we
153 add fields to them as we discover them, which we could do.
154
155 TODO: We could handle unions, but to be honest, it's probably not
156 worth the pain or slowdown. */
157
158 /* IPA-PTA optimizations possible.
159
160   When the indirectly called function is ANYTHING we can add disambiguation
161 based on the function signatures (or simply the parameter count which
162 is the varinfo size). We also do not need to consider functions that
163 do not have their address taken.
164
165 The is_global_var bit which marks escape points is overly conservative
166   in IPA mode.  Split it into is_escape_point and is_global_var - only
167 externally visible globals are escape points in IPA mode.
168 There is now is_ipa_escape_point but this is only used in a few
169 selected places.
170
171 The way we introduce DECL_PT_UID to avoid fixing up all points-to
172 sets in the translation unit when we copy a DECL during inlining
173 pessimizes precision. The advantage is that the DECL_PT_UID keeps
174 compile-time and memory usage overhead low - the points-to sets
175 do not grow or get unshared as they would during a fixup phase.
176 An alternative solution is to delay IPA PTA until after all
177 inlining transformations have been applied.
178
179 The way we propagate clobber/use information isn't optimized.
180 It should use a new complex constraint that properly filters
181 out local variables of the callee (though that would make
182 the sets invalid after inlining). OTOH we might as well
183   admit defeat to WHOPR and simply do all the clobber/use analysis
184   and propagation after PTA has finished but before we throw away
185   points-to information for memory variables.  WHOPR and PTA
186 do not play along well anyway - the whole constraint solving
187 would need to be done in WPA phase and it will be very interesting
188 to apply the results to local SSA names during LTRANS phase.
189
190   We probably should compute a per-function unit-ESCAPE solution
191   and propagate it simply like the clobber / uses solutions.  The
192   solution can go alongside the non-IPA escaped solution and be
193 used to query which vars escape the unit through a function.
194 This is also required to make the escaped-HEAP trick work in IPA mode.
195
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
198
199 And probably more. */
200
201 static bool use_field_sensitive = true;
202 static int in_ipa_mode = 0;
203
204 /* Used for predecessor bitmaps. */
205 static bitmap_obstack predbitmap_obstack;
206
207 /* Used for points-to sets. */
208 static bitmap_obstack pta_obstack;
209
210 /* Used for oldsolution members of variables. */
211 static bitmap_obstack oldpta_obstack;
212
213 /* Used for per-solver-iteration bitmaps. */
214 static bitmap_obstack iteration_obstack;
215
216 static unsigned int create_variable_info_for (tree, const char *, bool);
217 typedef struct constraint_graph *constraint_graph_t;
218 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
219
220 struct constraint;
221 typedef struct constraint *constraint_t;
222
223
224 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
225 if (a) \
226 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
227
228 static struct constraint_stats
229 {
230 unsigned int total_vars;
231 unsigned int nonpointer_vars;
232 unsigned int unified_vars_static;
233 unsigned int unified_vars_dynamic;
234 unsigned int iterations;
235 unsigned int num_edges;
236 unsigned int num_implicit_edges;
237 unsigned int points_to_sets_created;
238 } stats;
239
240 struct variable_info
241 {
242 /* ID of this variable */
243 unsigned int id;
244
245 /* True if this is a variable created by the constraint analysis, such as
246 heap variables and constraints we had to break up. */
247 unsigned int is_artificial_var : 1;
248
249 /* True if this is a special variable whose solution set should not be
250 changed. */
251 unsigned int is_special_var : 1;
252
253 /* True for variables whose size is not known or variable. */
254 unsigned int is_unknown_size_var : 1;
255
256 /* True for (sub-)fields that represent a whole variable. */
257 unsigned int is_full_var : 1;
258
259 /* True if this is a heap variable. */
260 unsigned int is_heap_var : 1;
261
262 /* True if this is a register variable. */
263 unsigned int is_reg_var : 1;
264
265 /* True if this field may contain pointers. */
266 unsigned int may_have_pointers : 1;
267
268 /* True if this field has only restrict qualified pointers. */
269 unsigned int only_restrict_pointers : 1;
270
271 /* True if this represents a heap var created for a restrict qualified
272 pointer. */
273 unsigned int is_restrict_var : 1;
274
275 /* True if this represents a global variable. */
276 unsigned int is_global_var : 1;
277
278 /* True if this represents a module escape point for IPA analysis. */
279 unsigned int is_ipa_escape_point : 1;
280
281   /* True if this represents an IPA function info.  */
282 unsigned int is_fn_info : 1;
283
284 /* ??? Store somewhere better. */
285 unsigned short ruid;
286
287 /* The ID of the variable for the next field in this structure
288 or zero for the last field in this structure. */
289 unsigned next;
290
291 /* The ID of the variable for the first field in this structure. */
292 unsigned head;
293
294 /* Offset of this variable, in bits, from the base variable */
295 unsigned HOST_WIDE_INT offset;
296
297 /* Size of the variable, in bits. */
298 unsigned HOST_WIDE_INT size;
299
300 /* Full size of the base variable, in bits. */
301 unsigned HOST_WIDE_INT fullsize;
302
303 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
304 the final points-to solution because it reaches its containing
305 function recursively. Zero if none is needed. */
306 unsigned int shadow_var_uid;
307
308 /* Name of this variable */
309 const char *name;
310
311 /* Tree that this variable is associated with. */
312 tree decl;
313
314 /* Points-to set for this variable. */
315 bitmap solution;
316
317 /* Old points-to set for this variable. */
318 bitmap oldsolution;
319 };
320 typedef struct variable_info *varinfo_t;
321
322 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
323 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
324 unsigned HOST_WIDE_INT);
325 static varinfo_t lookup_vi_for_tree (tree);
326 static inline bool type_can_have_subvars (const_tree);
327 static void make_param_constraints (varinfo_t);
328
329 /* Pool of variable info structures. */
330 static object_allocator<variable_info> variable_info_pool
331 ("Variable info pool");
332
333 /* Map varinfo to final pt_solution. */
334 static hash_map<varinfo_t, pt_solution *> *final_solutions;
335 struct obstack final_solutions_obstack;
336
337 /* Table of variable info structures for constraint variables.
338 Indexed directly by variable info id. */
339 static vec<varinfo_t> varmap;
340
341 /* Return the varmap element N */
342
343 static inline varinfo_t
344 get_varinfo (unsigned int n)
345 {
346 return varmap[n];
347 }
348
349 /* Return the next variable in the list of sub-variables of VI
350 or NULL if VI is the last sub-variable. */
351
352 static inline varinfo_t
353 vi_next (varinfo_t vi)
354 {
355 return get_varinfo (vi->next);
356 }
357
358 /* Static IDs for the special variables.  Variable ID zero is never
359    used for a variable; it serves as the terminator for sub-variable chains.  */
360 enum { nothing_id = 1, anything_id = 2, string_id = 3,
361 escaped_id = 4, nonlocal_id = 5,
362 storedanything_id = 6, integer_id = 7 };
363
364 /* Return a new variable info structure for a variable named NAME
365    and associated with tree T; if ADD_ID, the id is appended to NAME
366    for dumps.  Append it to the vector of variable info structures.  */
367
368 static varinfo_t
369 new_var_info (tree t, const char *name, bool add_id)
370 {
371 unsigned index = varmap.length ();
372 varinfo_t ret = variable_info_pool.allocate ();
373
374 if (dump_file && add_id)
375 {
376 char *tempname = xasprintf ("%s(%d)", name, index);
377 name = ggc_strdup (tempname);
378 free (tempname);
379 }
380
381 ret->id = index;
382 ret->name = name;
383 ret->decl = t;
384 /* Vars without decl are artificial and do not have sub-variables. */
385 ret->is_artificial_var = (t == NULL_TREE);
386 ret->is_special_var = false;
387 ret->is_unknown_size_var = false;
388 ret->is_full_var = (t == NULL_TREE);
389 ret->is_heap_var = false;
390 ret->may_have_pointers = true;
391 ret->only_restrict_pointers = false;
392 ret->is_restrict_var = false;
393 ret->ruid = 0;
394 ret->is_global_var = (t == NULL_TREE);
395 ret->is_ipa_escape_point = false;
396 ret->is_fn_info = false;
397 if (t && DECL_P (t))
398 ret->is_global_var = (is_global_var (t)
399 /* We have to treat even local register variables
400 as escape points. */
401 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
402 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
403 ret->solution = BITMAP_ALLOC (&pta_obstack);
404 ret->oldsolution = NULL;
405 ret->next = 0;
406 ret->shadow_var_uid = 0;
407 ret->head = ret->id;
408
409 stats.total_vars++;
410
411 varmap.safe_push (ret);
412
413 return ret;
414 }
415
416 /* A map mapping call statements to per-stmt variables for uses
417 and clobbers specific to the call. */
418 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
419
420 /* Lookup or create the variable for the call statement CALL. */
421
422 static varinfo_t
423 get_call_vi (gcall *call)
424 {
425 varinfo_t vi, vi2;
426
427 bool existed;
428 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
429 if (existed)
430 return *slot_p;
431
432 vi = new_var_info (NULL_TREE, "CALLUSED", true);
433 vi->offset = 0;
434 vi->size = 1;
435 vi->fullsize = 2;
436 vi->is_full_var = true;
437 vi->is_reg_var = true;
438
439 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
440 vi2->offset = 1;
441 vi2->size = 1;
442 vi2->fullsize = 2;
443 vi2->is_full_var = true;
444 vi2->is_reg_var = true;
445
446 vi->next = vi2->id;
447
448 *slot_p = vi;
449 return vi;
450 }
451
452 /* Lookup the variable for the call statement CALL representing
453 the uses. Returns NULL if there is nothing special about this call. */
454
455 static varinfo_t
456 lookup_call_use_vi (gcall *call)
457 {
458 varinfo_t *slot_p = call_stmt_vars->get (call);
459 if (slot_p)
460 return *slot_p;
461
462 return NULL;
463 }
464
465 /* Lookup the variable for the call statement CALL representing
466 the clobbers. Returns NULL if there is nothing special about this call. */
467
468 static varinfo_t
469 lookup_call_clobber_vi (gcall *call)
470 {
471 varinfo_t uses = lookup_call_use_vi (call);
472 if (!uses)
473 return NULL;
474
475 return vi_next (uses);
476 }
477
478 /* Lookup or create the variable for the call statement CALL representing
479 the uses. */
480
481 static varinfo_t
482 get_call_use_vi (gcall *call)
483 {
484 return get_call_vi (call);
485 }
486
487 /* Lookup or create the variable for the call statement CALL representing
488 the clobbers. */
489
490 static varinfo_t ATTRIBUTE_UNUSED
491 get_call_clobber_vi (gcall *call)
492 {
493 return vi_next (get_call_vi (call));
494 }
495
496
497 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
498
499 /* An expression that appears in a constraint. */
500
501 struct constraint_expr
502 {
503 /* Constraint type. */
504 constraint_expr_type type;
505
506 /* Variable we are referring to in the constraint. */
507 unsigned int var;
508
509 /* Offset, in bits, of this constraint from the beginning of
510 variables it ends up referring to.
511
512 IOW, in a deref constraint, we would deref, get the result set,
513 then add OFFSET to each member. */
514 HOST_WIDE_INT offset;
515 };
516
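/* For example (an illustrative sketch), the RHS of a statement like
   x = *(p + 32) is represented by the constraint expression
   { type = DEREF, var = <id of p>, offset = 32 }: we deref p, get
   the result set, then add 32 to each member.  */
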
517 /* Use 0x8000... as special unknown offset. */
518 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
519
520 typedef struct constraint_expr ce_s;
521 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
522 static void get_constraint_for (tree, vec<ce_s> *);
523 static void get_constraint_for_rhs (tree, vec<ce_s> *);
524 static void do_deref (vec<ce_s> *);
525
526 /* Our set constraints are made up of two constraint expressions, one
527 LHS, and one RHS.
528
529 As described in the introduction, our set constraints each represent an
530 operation between set valued variables.
531 */
532 struct constraint
533 {
534 struct constraint_expr lhs;
535 struct constraint_expr rhs;
536 };
537
538 /* List of constraints that we use to build the constraint graph from. */
539
540 static vec<constraint_t> constraints;
541 static object_allocator<constraint> constraint_pool ("Constraint pool");
542
543 /* The constraint graph is represented as an array of bitmaps
544 containing successor nodes. */
545
546 struct constraint_graph
547 {
548 /* Size of this graph, which may be different than the number of
549 nodes in the variable map. */
550 unsigned int size;
551
552 /* Explicit successors of each node. */
553 bitmap *succs;
554
555 /* Implicit predecessors of each node (Used for variable
556 substitution). */
557 bitmap *implicit_preds;
558
559 /* Explicit predecessors of each node (Used for variable substitution). */
560 bitmap *preds;
561
562 /* Indirect cycle representatives, or -1 if the node has no indirect
563 cycles. */
564 int *indirect_cycles;
565
566 /* Representative node for a node. rep[a] == a unless the node has
567 been unified. */
568 unsigned int *rep;
569
570 /* Equivalence class representative for a label. This is used for
571 variable substitution. */
572 int *eq_rep;
573
574 /* Pointer equivalence label for a node. All nodes with the same
575 pointer equivalence label can be unified together at some point
576 (either during constraint optimization or after the constraint
577 graph is built). */
578 unsigned int *pe;
579
580 /* Pointer equivalence representative for a label. This is used to
581 handle nodes that are pointer equivalent but not location
582 equivalent. We can unite these once the addressof constraints
583 are transformed into initial points-to sets. */
584 int *pe_rep;
585
586 /* Pointer equivalence label for each node, used during variable
587 substitution. */
588 unsigned int *pointer_label;
589
590 /* Location equivalence label for each node, used during location
591 equivalence finding. */
592 unsigned int *loc_label;
593
594 /* Pointed-by set for each node, used during location equivalence
595 finding. This is pointed-by rather than pointed-to, because it
596 is constructed using the predecessor graph. */
597 bitmap *pointed_by;
598
599 /* Points to sets for pointer equivalence. This is *not* the actual
600 points-to sets for nodes. */
601 bitmap *points_to;
602
603 /* Bitmap of nodes where the bit is set if the node is a direct
604 node. Used for variable substitution. */
605 sbitmap direct_nodes;
606
607 /* Bitmap of nodes where the bit is set if the node is address
608 taken. Used for variable substitution. */
609 bitmap address_taken;
610
611 /* Vector of complex constraints for each graph node. Complex
612 constraints are those involving dereferences or offsets that are
613 not 0. */
614 vec<constraint_t> *complex;
615 };
616
617 static constraint_graph_t graph;
618
619 /* During variable substitution and the offline version of indirect
620 cycle finding, we create nodes to represent dereferences and
621 address taken constraints. These represent where these start and
622 end. */
623 #define FIRST_REF_NODE (varmap).length ()
624 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
625
626 /* Return the representative node for NODE, if NODE has been unioned
627 with another NODE.
628 This function performs path compression along the way to finding
629 the representative. */
630
631 static unsigned int
632 find (unsigned int node)
633 {
634 gcc_checking_assert (node < graph->size);
635 if (graph->rep[node] != node)
636 return graph->rep[node] = find (graph->rep[node]);
637 return node;
638 }
639
640 /* Union the FROM node into the TO node.
641    Note that at some point in the future, we may want to do
642    union-by-rank, in which case we are going to have to return the
643    node we unified to.  */
644
645 static bool
646 unite (unsigned int to, unsigned int from)
647 {
648 gcc_checking_assert (to < graph->size && from < graph->size);
649 if (to != from && graph->rep[from] != to)
650 {
651 graph->rep[from] = to;
652 return true;
653 }
654 return false;
655 }
656
657 /* Create a new constraint consisting of LHS and RHS expressions. */
658
659 static constraint_t
660 new_constraint (const struct constraint_expr lhs,
661 const struct constraint_expr rhs)
662 {
663 constraint_t ret = constraint_pool.allocate ();
664 ret->lhs = lhs;
665 ret->rhs = rhs;
666 return ret;
667 }
668
669 /* Print out constraint C to FILE. */
670
671 static void
672 dump_constraint (FILE *file, constraint_t c)
673 {
674 if (c->lhs.type == ADDRESSOF)
675 fprintf (file, "&");
676 else if (c->lhs.type == DEREF)
677 fprintf (file, "*");
678 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
679 if (c->lhs.offset == UNKNOWN_OFFSET)
680 fprintf (file, " + UNKNOWN");
681 else if (c->lhs.offset != 0)
682 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
683 fprintf (file, " = ");
684 if (c->rhs.type == ADDRESSOF)
685 fprintf (file, "&");
686 else if (c->rhs.type == DEREF)
687 fprintf (file, "*");
688 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
689 if (c->rhs.offset == UNKNOWN_OFFSET)
690 fprintf (file, " + UNKNOWN");
691 else if (c->rhs.offset != 0)
692 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
693 }
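
/* For example (illustrative), a constraint with a SCALAR LHS x and a
   DEREF RHS y at offset 8 is dumped as

     x = *y + 8

   while an ADDRESSOF RHS would print as  x = &y.  */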
694
695
696 void debug_constraint (constraint_t);
697 void debug_constraints (void);
698 void debug_constraint_graph (void);
699 void debug_solution_for_var (unsigned int);
700 void debug_sa_points_to_info (void);
701 void debug_varinfo (varinfo_t);
702 void debug_varmap (void);
703
704 /* Print out constraint C to stderr. */
705
706 DEBUG_FUNCTION void
707 debug_constraint (constraint_t c)
708 {
709 dump_constraint (stderr, c);
710 fprintf (stderr, "\n");
711 }
712
713 /* Print out all constraints to FILE */
714
715 static void
716 dump_constraints (FILE *file, int from)
717 {
718 int i;
719 constraint_t c;
720 for (i = from; constraints.iterate (i, &c); i++)
721 if (c)
722 {
723 dump_constraint (file, c);
724 fprintf (file, "\n");
725 }
726 }
727
728 /* Print out all constraints to stderr. */
729
730 DEBUG_FUNCTION void
731 debug_constraints (void)
732 {
733 dump_constraints (stderr, 0);
734 }
735
736 /* Print the constraint graph in dot format. */
737
738 static void
739 dump_constraint_graph (FILE *file)
740 {
741 unsigned int i;
742
743 /* Only print the graph if it has already been initialized: */
744 if (!graph)
745 return;
746
747 /* Prints the header of the dot file: */
748 fprintf (file, "strict digraph {\n");
749 fprintf (file, " node [\n shape = box\n ]\n");
750 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
751 fprintf (file, "\n // List of nodes and complex constraints in "
752 "the constraint graph:\n");
753
754 /* The next lines print the nodes in the graph together with the
755 complex constraints attached to them. */
756 for (i = 1; i < graph->size; i++)
757 {
758 if (i == FIRST_REF_NODE)
759 continue;
760 if (find (i) != i)
761 continue;
762 if (i < FIRST_REF_NODE)
763 fprintf (file, "\"%s\"", get_varinfo (i)->name);
764 else
765 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
766 if (graph->complex[i].exists ())
767 {
768 unsigned j;
769 constraint_t c;
770 fprintf (file, " [label=\"\\N\\n");
771 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
772 {
773 dump_constraint (file, c);
774 fprintf (file, "\\l");
775 }
776 fprintf (file, "\"]");
777 }
778 fprintf (file, ";\n");
779 }
780
781 /* Go over the edges. */
782 fprintf (file, "\n // Edges in the constraint graph:\n");
783 for (i = 1; i < graph->size; i++)
784 {
785 unsigned j;
786 bitmap_iterator bi;
787 if (find (i) != i)
788 continue;
789 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
790 {
791 unsigned to = find (j);
792 if (i == to)
793 continue;
794 if (i < FIRST_REF_NODE)
795 fprintf (file, "\"%s\"", get_varinfo (i)->name);
796 else
797 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
798 fprintf (file, " -> ");
799 if (to < FIRST_REF_NODE)
800 fprintf (file, "\"%s\"", get_varinfo (to)->name);
801 else
802 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
803 fprintf (file, ";\n");
804 }
805 }
806
807 /* Prints the tail of the dot file. */
808 fprintf (file, "}\n");
809 }
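
/* Schematically, and as a sketch only (attribute blocks elided), a
   dump produced by the function above looks like:

     strict digraph {
       // List of nodes and complex constraints in the constraint graph:
       "x" [label="\N\nx = *y\l"];

       // Edges in the constraint graph:
       "y" -> "x";
     }  */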
810
811 /* Print out the constraint graph to stderr. */
812
813 DEBUG_FUNCTION void
814 debug_constraint_graph (void)
815 {
816 dump_constraint_graph (stderr);
817 }
818
819 /* SOLVER FUNCTIONS
820
821    The solver is a simple worklist solver that works on the
822    following algorithm:
823
824 sbitmap changed_nodes = all zeroes;
825 changed_count = 0;
826 For each node that is not already collapsed:
827 changed_count++;
828 set bit in changed nodes
829
830 while (changed_count > 0)
831 {
832 compute topological ordering for constraint graph
833
834 find and collapse cycles in the constraint graph (updating
835 changed if necessary)
836
837 for each node (n) in the graph in topological order:
838 changed_count--;
839
840 Process each complex constraint associated with the node,
841 updating changed if necessary.
842
843 For each outgoing edge from n, propagate the solution from n to
844 the destination of the edge, updating changed as necessary.
845
846 } */
847
848 /* Return true if two constraint expressions A and B are equal. */
849
850 static bool
851 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
852 {
853 return a.type == b.type && a.var == b.var && a.offset == b.offset;
854 }
855
856 /* Return true if constraint expression A is less than constraint expression
857 B. This is just arbitrary, but consistent, in order to give them an
858 ordering. */
859
860 static bool
861 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
862 {
863 if (a.type == b.type)
864 {
865 if (a.var == b.var)
866 return a.offset < b.offset;
867 else
868 return a.var < b.var;
869 }
870 else
871 return a.type < b.type;
872 }
873
874 /* Return true if constraint A is less than constraint B. This is just
875 arbitrary, but consistent, in order to give them an ordering. */
876
877 static bool
878 constraint_less (const constraint_t &a, const constraint_t &b)
879 {
880 if (constraint_expr_less (a->lhs, b->lhs))
881 return true;
882 else if (constraint_expr_less (b->lhs, a->lhs))
883 return false;
884 else
885 return constraint_expr_less (a->rhs, b->rhs);
886 }
887
888 /* Return true if two constraints A and B are equal. */
889
890 static bool
891 constraint_equal (struct constraint a, struct constraint b)
892 {
893 return constraint_expr_equal (a.lhs, b.lhs)
894 && constraint_expr_equal (a.rhs, b.rhs);
895 }
896
897
898 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
899
900 static constraint_t
901 constraint_vec_find (vec<constraint_t> vec,
902 struct constraint lookfor)
903 {
904 unsigned int place;
905 constraint_t found;
906
907 if (!vec.exists ())
908 return NULL;
909
910 place = vec.lower_bound (&lookfor, constraint_less);
911 if (place >= vec.length ())
912 return NULL;
913 found = vec[place];
914 if (!constraint_equal (*found, lookfor))
915 return NULL;
916 return found;
917 }
918
919 /* Union two constraint vectors, TO and FROM. Put the result in TO.
920    Returns true if the TO set is changed.  */
921
922 static bool
923 constraint_set_union (vec<constraint_t> *to,
924 vec<constraint_t> *from)
925 {
926 int i;
927 constraint_t c;
928 bool any_change = false;
929
930 FOR_EACH_VEC_ELT (*from, i, c)
931 {
932 if (constraint_vec_find (*to, *c) == NULL)
933 {
934 unsigned int place = to->lower_bound (c, constraint_less);
935 to->safe_insert (place, c);
936 any_change = true;
937 }
938 }
939 return any_change;
940 }
941
942 /* Expands the solution in SET to all sub-fields of the variables included.  */
943
944 static bitmap
945 solution_set_expand (bitmap set, bitmap *expanded)
946 {
947 bitmap_iterator bi;
948 unsigned j;
949
950 if (*expanded)
951 return *expanded;
952
953 *expanded = BITMAP_ALLOC (&iteration_obstack);
954
955   /* In a first pass expand to the head of the variables we need to
956      add all sub-fields of.  This avoids quadratic behavior.  */
957 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
958 {
959 varinfo_t v = get_varinfo (j);
960 if (v->is_artificial_var
961 || v->is_full_var)
962 continue;
963 bitmap_set_bit (*expanded, v->head);
964 }
965
966 /* In the second pass now expand all head variables with subfields. */
967 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
968 {
969 varinfo_t v = get_varinfo (j);
970 if (v->head != j)
971 continue;
972 for (v = vi_next (v); v != NULL; v = vi_next (v))
973 bitmap_set_bit (*expanded, v->id);
974 }
975
976 /* And finally set the rest of the bits from SET. */
977 bitmap_ior_into (*expanded, set);
978
979 return *expanded;
980 }
981
982 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
983 process. */
984
985 static bool
986 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
987 bitmap *expanded_delta)
988 {
989 bool changed = false;
990 bitmap_iterator bi;
991 unsigned int i;
992
993 /* If the solution of DELTA contains anything it is good enough to transfer
994 this to TO. */
995 if (bitmap_bit_p (delta, anything_id))
996 return bitmap_set_bit (to, anything_id);
997
998 /* If the offset is unknown we have to expand the solution to
999 all subfields. */
1000 if (inc == UNKNOWN_OFFSET)
1001 {
1002 delta = solution_set_expand (delta, expanded_delta);
1003 changed |= bitmap_ior_into (to, delta);
1004 return changed;
1005 }
1006
1007 /* For non-zero offset union the offsetted solution into the destination. */
1008 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
1009 {
1010 varinfo_t vi = get_varinfo (i);
1011
1012 /* If this is a variable with just one field just set its bit
1013 in the result. */
1014 if (vi->is_artificial_var
1015 || vi->is_unknown_size_var
1016 || vi->is_full_var)
1017 changed |= bitmap_set_bit (to, i);
1018 else
1019 {
1020 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1021 unsigned HOST_WIDE_INT size = vi->size;
1022
1023 /* If the offset makes the pointer point to before the
1024             variable, use offset zero for the field lookup.  */
1025 if (fieldoffset < 0)
1026 vi = get_varinfo (vi->head);
1027 else
1028 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1029
1030 do
1031 {
1032 changed |= bitmap_set_bit (to, vi->id);
1033 if (vi->is_full_var
1034 || vi->next == 0)
1035 break;
1036
1037 /* We have to include all fields that overlap the current field
1038 shifted by inc. */
1039 vi = vi_next (vi);
1040 }
1041 while (vi->offset < fieldoffset + size);
1042 }
1043 }
1044
1045 return changed;
1046 }
1047
1048 /* Insert constraint C into the list of complex constraints for graph
1049 node VAR. */
1050
1051 static void
1052 insert_into_complex (constraint_graph_t graph,
1053 unsigned int var, constraint_t c)
1054 {
1055 vec<constraint_t> complex = graph->complex[var];
1056 unsigned int place = complex.lower_bound (c, constraint_less);
1057
1058 /* Only insert constraints that do not already exist. */
1059 if (place >= complex.length ()
1060 || !constraint_equal (*c, *complex[place]))
1061 graph->complex[var].safe_insert (place, c);
1062 }
1063
1064
1065 /* Condense two variable nodes into a single variable node, by moving
1066 all associated info from FROM to TO. Returns true if TO node's
1067 constraint set changes after the merge. */
1068
1069 static bool
1070 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1071 unsigned int from)
1072 {
1073 unsigned int i;
1074 constraint_t c;
1075 bool any_change = false;
1076
1077 gcc_checking_assert (find (from) == to);
1078
1079 /* Move all complex constraints from src node into to node */
1080 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1081 {
1082       /* In complex constraints for node FROM, we may have either
1083          a = *FROM or *FROM = a, or an offsetted constraint, which is
1084          always added to the rhs node's constraints.  */
1085
1086 if (c->rhs.type == DEREF)
1087 c->rhs.var = to;
1088 else if (c->lhs.type == DEREF)
1089 c->lhs.var = to;
1090 else
1091 c->rhs.var = to;
1092
1093 }
1094 any_change = constraint_set_union (&graph->complex[to],
1095 &graph->complex[from]);
1096 graph->complex[from].release ();
1097 return any_change;
1098 }
1099
1100
1101 /* Remove edges involving NODE from GRAPH. */
1102
1103 static void
1104 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1105 {
1106 if (graph->succs[node])
1107 BITMAP_FREE (graph->succs[node]);
1108 }
1109
1110 /* Merge GRAPH nodes FROM and TO into node TO. */
1111
1112 static void
1113 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1114 unsigned int from)
1115 {
1116 if (graph->indirect_cycles[from] != -1)
1117 {
1118 /* If we have indirect cycles with the from node, and we have
1119 none on the to node, the to node has indirect cycles from the
1120 from node now that they are unified.
1121 If indirect cycles exist on both, unify the nodes that they
1122 are in a cycle with, since we know they are in a cycle with
1123 each other. */
1124 if (graph->indirect_cycles[to] == -1)
1125 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1126 }
1127
1128 /* Merge all the successor edges. */
1129 if (graph->succs[from])
1130 {
1131 if (!graph->succs[to])
1132 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1133 bitmap_ior_into (graph->succs[to],
1134 graph->succs[from]);
1135 }
1136
1137 clear_edges_for_node (graph, from);
1138 }
1139
1140
1141 /* Add an implicit predecessor graph edge to GRAPH, going from TO to
1142    FROM if it doesn't exist in the graph already.  */
1143
1144 static void
1145 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1146 unsigned int from)
1147 {
1148 if (to == from)
1149 return;
1150
1151 if (!graph->implicit_preds[to])
1152 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1153
1154 if (bitmap_set_bit (graph->implicit_preds[to], from))
1155 stats.num_implicit_edges++;
1156 }
1157
1158 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1159    it doesn't exist in the graph already.
1160    The edge is recorded in the predecessor bitmap of TO.  */
1161
1162 static void
1163 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1164 unsigned int from)
1165 {
1166 if (!graph->preds[to])
1167 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1168 bitmap_set_bit (graph->preds[to], from);
1169 }
1170
1171 /* Add a graph edge to GRAPH, going from FROM to TO if
1172 it doesn't exist in the graph already.
1173 Return false if the edge already existed, true otherwise. */
1174
1175 static bool
1176 add_graph_edge (constraint_graph_t graph, unsigned int to,
1177 unsigned int from)
1178 {
1179 if (to == from)
1180 {
1181 return false;
1182 }
1183 else
1184 {
1185 bool r = false;
1186
1187 if (!graph->succs[from])
1188 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1189 if (bitmap_set_bit (graph->succs[from], to))
1190 {
1191 r = true;
1192 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1193 stats.num_edges++;
1194 }
1195 return r;
1196 }
1197 }
1198
1199
1200 /* Initialize the constraint graph structure to contain SIZE nodes. */
1201
1202 static void
1203 init_graph (unsigned int size)
1204 {
1205 unsigned int j;
1206
1207 graph = XCNEW (struct constraint_graph);
1208 graph->size = size;
1209 graph->succs = XCNEWVEC (bitmap, graph->size);
1210 graph->indirect_cycles = XNEWVEC (int, graph->size);
1211 graph->rep = XNEWVEC (unsigned int, graph->size);
1212 /* ??? Macros do not support template types with multiple arguments,
1213 so we use a typedef to work around it. */
1214 typedef vec<constraint_t> vec_constraint_t_heap;
1215 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1216 graph->pe = XCNEWVEC (unsigned int, graph->size);
1217 graph->pe_rep = XNEWVEC (int, graph->size);
1218
1219 for (j = 0; j < graph->size; j++)
1220 {
1221 graph->rep[j] = j;
1222 graph->pe_rep[j] = -1;
1223 graph->indirect_cycles[j] = -1;
1224 }
1225 }
1226
1227 /* Build the constraint graph, adding only predecessor edges right now. */
1228
1229 static void
1230 build_pred_graph (void)
1231 {
1232 int i;
1233 constraint_t c;
1234 unsigned int j;
1235
1236 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1237 graph->preds = XCNEWVEC (bitmap, graph->size);
1238 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1239 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1240 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1241 graph->points_to = XCNEWVEC (bitmap, graph->size);
1242 graph->eq_rep = XNEWVEC (int, graph->size);
1243 graph->direct_nodes = sbitmap_alloc (graph->size);
1244 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1245 bitmap_clear (graph->direct_nodes);
1246
1247 for (j = 1; j < FIRST_REF_NODE; j++)
1248 {
1249 if (!get_varinfo (j)->is_special_var)
1250 bitmap_set_bit (graph->direct_nodes, j);
1251 }
1252
1253 for (j = 0; j < graph->size; j++)
1254 graph->eq_rep[j] = -1;
1255
1256 for (j = 0; j < varmap.length (); j++)
1257 graph->indirect_cycles[j] = -1;
1258
1259 FOR_EACH_VEC_ELT (constraints, i, c)
1260 {
1261 struct constraint_expr lhs = c->lhs;
1262 struct constraint_expr rhs = c->rhs;
1263 unsigned int lhsvar = lhs.var;
1264 unsigned int rhsvar = rhs.var;
1265
1266 if (lhs.type == DEREF)
1267 {
1268 /* *x = y. */
1269 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1270 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1271 }
1272 else if (rhs.type == DEREF)
1273 {
1274 /* x = *y */
1275 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1276 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1277 else
1278 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1279 }
1280 else if (rhs.type == ADDRESSOF)
1281 {
1282 varinfo_t v;
1283
1284 /* x = &y */
1285 if (graph->points_to[lhsvar] == NULL)
1286 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1287 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1288
1289 if (graph->pointed_by[rhsvar] == NULL)
1290 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1291 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1292
1293 /* Implicitly, *x = y */
1294 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1295
1296 /* All related variables are no longer direct nodes. */
1297 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1298 v = get_varinfo (rhsvar);
1299 if (!v->is_full_var)
1300 {
1301 v = get_varinfo (v->head);
1302 do
1303 {
1304 bitmap_clear_bit (graph->direct_nodes, v->id);
1305 v = vi_next (v);
1306 }
1307 while (v != NULL);
1308 }
1309 bitmap_set_bit (graph->address_taken, rhsvar);
1310 }
1311 else if (lhsvar > anything_id
1312 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1313 {
1314 /* x = y */
1315 add_pred_graph_edge (graph, lhsvar, rhsvar);
1316 /* Implicitly, *x = *y */
1317 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1318 FIRST_REF_NODE + rhsvar);
1319 }
1320 else if (lhs.offset != 0 || rhs.offset != 0)
1321 {
1322 if (rhs.offset != 0)
1323 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1324 else if (lhs.offset != 0)
1325 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1326 }
1327 }
1328 }
1329
1330 /* Build the constraint graph, adding successor edges. */
1331
1332 static void
1333 build_succ_graph (void)
1334 {
1335 unsigned i, t;
1336 constraint_t c;
1337
1338 FOR_EACH_VEC_ELT (constraints, i, c)
1339 {
1340 struct constraint_expr lhs;
1341 struct constraint_expr rhs;
1342 unsigned int lhsvar;
1343 unsigned int rhsvar;
1344
1345 if (!c)
1346 continue;
1347
1348 lhs = c->lhs;
1349 rhs = c->rhs;
1350 lhsvar = find (lhs.var);
1351 rhsvar = find (rhs.var);
1352
1353 if (lhs.type == DEREF)
1354 {
1355 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1356 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1357 }
1358 else if (rhs.type == DEREF)
1359 {
1360 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1361 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1362 }
1363 else if (rhs.type == ADDRESSOF)
1364 {
1365 /* x = &y */
1366 gcc_checking_assert (find (rhs.var) == rhs.var);
1367 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1368 }
1369 else if (lhsvar > anything_id
1370 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1371 {
1372 add_graph_edge (graph, lhsvar, rhsvar);
1373 }
1374 }
1375
1376 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1377 receive pointers. */
1378 t = find (storedanything_id);
1379 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1380 {
1381 if (!bitmap_bit_p (graph->direct_nodes, i)
1382 && get_varinfo (i)->may_have_pointers)
1383 add_graph_edge (graph, find (i), t);
1384 }
1385
1386 /* Everything stored to ANYTHING also potentially escapes. */
1387 add_graph_edge (graph, find (escaped_id), t);
1388 }
1389
1390
1391 /* Changed variables on the last iteration. */
1392 static bitmap changed;
1393
1394 /* Strongly Connected Component visitation info. */
1395
1396 struct scc_info
1397 {
1398 scc_info (size_t size);
1399 ~scc_info ();
1400
1401 auto_sbitmap visited;
1402 auto_sbitmap deleted;
1403 unsigned int *dfs;
1404 unsigned int *node_mapping;
1405 int current_index;
1406 auto_vec<unsigned> scc_stack;
1407 };
1408
1409
1410 /* Recursive routine to find strongly connected components in GRAPH.
1411 SI is the SCC info to store the information in, and N is the id of current
1412 graph node we are processing.
1413
1414 This is Tarjan's strongly connected component finding algorithm, as
1415 modified by Nuutila to keep only non-root nodes on the stack.
1416    The algorithm can be found in "On finding the strongly connected
1417    components in a directed graph" by Esko Nuutila and Eljas
1418    Soisalon-Soininen, in Information Processing Letters volume 49,
1419    number 1, pages 9-14.  */
1420
1421 static void
1422 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1423 {
1424 unsigned int i;
1425 bitmap_iterator bi;
1426 unsigned int my_dfs;
1427
1428 bitmap_set_bit (si->visited, n);
1429 si->dfs[n] = si->current_index ++;
1430 my_dfs = si->dfs[n];
1431
1432 /* Visit all the successors. */
1433 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1434 {
1435 unsigned int w;
1436
1437 if (i > LAST_REF_NODE)
1438 break;
1439
1440 w = find (i);
1441 if (bitmap_bit_p (si->deleted, w))
1442 continue;
1443
1444 if (!bitmap_bit_p (si->visited, w))
1445 scc_visit (graph, si, w);
1446
1447 unsigned int t = find (w);
1448 gcc_checking_assert (find (n) == n);
1449 if (si->dfs[t] < si->dfs[n])
1450 si->dfs[n] = si->dfs[t];
1451 }
1452
1453 /* See if any components have been identified. */
1454 if (si->dfs[n] == my_dfs)
1455 {
1456 if (si->scc_stack.length () > 0
1457 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1458 {
1459 bitmap scc = BITMAP_ALLOC (NULL);
1460 unsigned int lowest_node;
1461 bitmap_iterator bi;
1462
1463 bitmap_set_bit (scc, n);
1464
1465 while (si->scc_stack.length () != 0
1466 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1467 {
1468 unsigned int w = si->scc_stack.pop ();
1469
1470 bitmap_set_bit (scc, w);
1471 }
1472
1473 lowest_node = bitmap_first_set_bit (scc);
1474 gcc_assert (lowest_node < FIRST_REF_NODE);
1475
1476 /* Collapse the SCC nodes into a single node, and mark the
1477 indirect cycles. */
1478 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1479 {
1480 if (i < FIRST_REF_NODE)
1481 {
1482 if (unite (lowest_node, i))
1483 unify_nodes (graph, lowest_node, i, false);
1484 }
1485 else
1486 {
1487 unite (lowest_node, i);
1488 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1489 }
1490 }
1491 }
1492 bitmap_set_bit (si->deleted, n);
1493 }
1494 else
1495 si->scc_stack.safe_push (n);
1496 }
1497
1498 /* Unify node FROM into node TO, updating the changed count if
1499 necessary when UPDATE_CHANGED is true. */
1500
1501 static void
1502 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1503 bool update_changed)
1504 {
1505 gcc_checking_assert (to != from && find (to) == to);
1506
1507 if (dump_file && (dump_flags & TDF_DETAILS))
1508 fprintf (dump_file, "Unifying %s to %s\n",
1509 get_varinfo (from)->name,
1510 get_varinfo (to)->name);
1511
1512 if (update_changed)
1513 stats.unified_vars_dynamic++;
1514 else
1515 stats.unified_vars_static++;
1516
1517 merge_graph_nodes (graph, to, from);
1518 if (merge_node_constraints (graph, to, from))
1519 {
1520 if (update_changed)
1521 bitmap_set_bit (changed, to);
1522 }
1523
1524   /* If FROM was marked as changed, clear its bit and mark TO as
1525      changed instead, since TO now stands for FROM.  */
1526
1527 if (update_changed
1528 && bitmap_clear_bit (changed, from))
1529 bitmap_set_bit (changed, to);
1530 varinfo_t fromvi = get_varinfo (from);
1531 if (fromvi->solution)
1532 {
1533 /* If the solution changes because of the merging, we need to mark
1534 the variable as changed. */
1535 varinfo_t tovi = get_varinfo (to);
1536 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1537 {
1538 if (update_changed)
1539 bitmap_set_bit (changed, to);
1540 }
1541
1542 BITMAP_FREE (fromvi->solution);
1543 if (fromvi->oldsolution)
1544 BITMAP_FREE (fromvi->oldsolution);
1545
1546 if (stats.iterations > 0
1547 && tovi->oldsolution)
1548 BITMAP_FREE (tovi->oldsolution);
1549 }
1550 if (graph->succs[to])
1551 bitmap_clear_bit (graph->succs[to], to);
1552 }
1553
1554 /* Information needed to compute the topological ordering of a graph. */
1555
1556 struct topo_info
1557 {
1558 /* sbitmap of visited nodes. */
1559 sbitmap visited;
1560 /* Array that stores the topological order of the graph, *in
1561 reverse*. */
1562 vec<unsigned> topo_order;
1563 };
1564
1565
1566 /* Initialize and return a topological info structure. */
1567
1568 static struct topo_info *
1569 init_topo_info (void)
1570 {
1571 size_t size = graph->size;
1572 struct topo_info *ti = XNEW (struct topo_info);
1573 ti->visited = sbitmap_alloc (size);
1574 bitmap_clear (ti->visited);
1575 ti->topo_order.create (1);
1576 return ti;
1577 }
1578
1579
1580 /* Free the topological sort info pointed to by TI. */
1581
1582 static void
1583 free_topo_info (struct topo_info *ti)
1584 {
1585 sbitmap_free (ti->visited);
1586 ti->topo_order.release ();
1587 free (ti);
1588 }
1589
1590 /* Visit the graph in topological order, and store the order in the
1591 topo_info structure. */
1592
1593 static void
1594 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1595 unsigned int n)
1596 {
1597 bitmap_iterator bi;
1598 unsigned int j;
1599
1600 bitmap_set_bit (ti->visited, n);
1601
1602 if (graph->succs[n])
1603 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1604 {
1605 if (!bitmap_bit_p (ti->visited, j))
1606 topo_visit (graph, ti, j);
1607 }
1608
1609 ti->topo_order.safe_push (n);
1610 }
1611
1612 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1613 starting solution for y. */
1614
1615 static void
1616 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1617 bitmap delta, bitmap *expanded_delta)
1618 {
1619 unsigned int lhs = c->lhs.var;
1620 bool flag = false;
1621 bitmap sol = get_varinfo (lhs)->solution;
1622 unsigned int j;
1623 bitmap_iterator bi;
1624 HOST_WIDE_INT roffset = c->rhs.offset;
1625
1626 /* Our IL does not allow this. */
1627 gcc_checking_assert (c->lhs.offset == 0);
1628
1629 /* If the solution of Y contains anything it is good enough to transfer
1630 this to the LHS. */
1631 if (bitmap_bit_p (delta, anything_id))
1632 {
1633 flag |= bitmap_set_bit (sol, anything_id);
1634 goto done;
1635 }
1636
1637   /* If we do not know at which offset the rhs is dereferenced, compute
1638 the reachability set of DELTA, conservatively assuming it is
1639 dereferenced at all valid offsets. */
1640 if (roffset == UNKNOWN_OFFSET)
1641 {
1642 delta = solution_set_expand (delta, expanded_delta);
1643 /* No further offset processing is necessary. */
1644 roffset = 0;
1645 }
1646
1647 /* For each variable j in delta (Sol(y)), add
1648 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1649 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1650 {
1651 varinfo_t v = get_varinfo (j);
1652 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1653 unsigned HOST_WIDE_INT size = v->size;
1654 unsigned int t;
1655
1656 if (v->is_full_var)
1657 ;
1658 else if (roffset != 0)
1659 {
1660 if (fieldoffset < 0)
1661 v = get_varinfo (v->head);
1662 else
1663 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1664 }
1665
1666 /* We have to include all fields that overlap the current field
1667 shifted by roffset. */
1668 do
1669 {
1670 t = find (v->id);
1671
1672 /* Adding edges from the special vars is pointless.
1673 They don't have sets that can change. */
1674 if (get_varinfo (t)->is_special_var)
1675 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1676 /* Merging the solution from ESCAPED needlessly increases
1677 the set. Use ESCAPED as representative instead. */
1678 else if (v->id == escaped_id)
1679 flag |= bitmap_set_bit (sol, escaped_id);
1680 else if (v->may_have_pointers
1681 && add_graph_edge (graph, lhs, t))
1682 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1683
1684 if (v->is_full_var
1685 || v->next == 0)
1686 break;
1687
1688 v = vi_next (v);
1689 }
1690 while (v->offset < fieldoffset + size);
1691 }
1692
1693 done:
1694 /* If the LHS solution changed, mark the var as changed. */
1695 if (flag)
1696 {
1697 get_varinfo (lhs)->solution = sol;
1698 bitmap_set_bit (changed, lhs);
1699 }
1700 }
1701
1702 /* Process a constraint C that represents *(x + off) = y using DELTA
1703 as the starting solution for x. */
1704
1705 static void
1706 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1707 {
1708 unsigned int rhs = c->rhs.var;
1709 bitmap sol = get_varinfo (rhs)->solution;
1710 unsigned int j;
1711 bitmap_iterator bi;
1712 HOST_WIDE_INT loff = c->lhs.offset;
1713 bool escaped_p = false;
1714
1715 /* Our IL does not allow this. */
1716 gcc_checking_assert (c->rhs.offset == 0);
1717
1718 /* If the solution of y contains ANYTHING simply use the ANYTHING
1719 solution. This avoids needlessly increasing the points-to sets. */
1720 if (bitmap_bit_p (sol, anything_id))
1721 sol = get_varinfo (find (anything_id))->solution;
1722
1723 /* If the solution for x contains ANYTHING we have to merge the
1724 solution of y into all pointer variables which we do via
1725 STOREDANYTHING. */
1726 if (bitmap_bit_p (delta, anything_id))
1727 {
1728 unsigned t = find (storedanything_id);
1729 if (add_graph_edge (graph, t, rhs))
1730 {
1731 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1732 bitmap_set_bit (changed, t);
1733 }
1734 return;
1735 }
1736
1737   /* If we do not know at which offset the lhs is dereferenced, compute
1738 the reachability set of DELTA, conservatively assuming it is
1739 dereferenced at all valid offsets. */
1740 if (loff == UNKNOWN_OFFSET)
1741 {
1742 delta = solution_set_expand (delta, expanded_delta);
1743 loff = 0;
1744 }
1745
1746 /* For each member j of delta (Sol(x)), add an edge from y to j and
1747 union Sol(y) into Sol(j) */
1748 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1749 {
1750 varinfo_t v = get_varinfo (j);
1751 unsigned int t;
1752 HOST_WIDE_INT fieldoffset = v->offset + loff;
1753 unsigned HOST_WIDE_INT size = v->size;
1754
1755 if (v->is_full_var)
1756 ;
1757 else if (loff != 0)
1758 {
1759 if (fieldoffset < 0)
1760 v = get_varinfo (v->head);
1761 else
1762 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1763 }
1764
1765 /* We have to include all fields that overlap the current field
1766 shifted by loff. */
1767 do
1768 {
1769 if (v->may_have_pointers)
1770 {
1771 /* If v is a global variable then this is an escape point. */
1772 if (v->is_global_var
1773 && !escaped_p)
1774 {
1775 t = find (escaped_id);
1776 if (add_graph_edge (graph, t, rhs)
1777 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1778 bitmap_set_bit (changed, t);
1779 /* Enough to let rhs escape once. */
1780 escaped_p = true;
1781 }
1782
1783 if (v->is_special_var)
1784 break;
1785
1786 t = find (v->id);
1787 if (add_graph_edge (graph, t, rhs)
1788 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1789 bitmap_set_bit (changed, t);
1790 }
1791
1792 if (v->is_full_var
1793 || v->next == 0)
1794 break;
1795
1796 v = vi_next (v);
1797 }
1798 while (v->offset < fieldoffset + size);
1799 }
1800 }
1801
1802 /* Handle a non-simple (simple meaning requires no iteration)
1803    constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */
1804
1805 static void
1806 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1807 bitmap *expanded_delta)
1808 {
1809 if (c->lhs.type == DEREF)
1810 {
1811 if (c->rhs.type == ADDRESSOF)
1812 {
1813 gcc_unreachable ();
1814 }
1815 else
1816 {
1817 /* *x = y */
1818 do_ds_constraint (c, delta, expanded_delta);
1819 }
1820 }
1821 else if (c->rhs.type == DEREF)
1822 {
1823 /* x = *y */
1824 if (!(get_varinfo (c->lhs.var)->is_special_var))
1825 do_sd_constraint (graph, c, delta, expanded_delta);
1826 }
1827 else
1828 {
1829 bitmap tmp;
1830 bool flag = false;
1831
1832 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1833 && c->rhs.offset != 0 && c->lhs.offset == 0);
1834 tmp = get_varinfo (c->lhs.var)->solution;
1835
1836 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1837 expanded_delta);
1838
1839 if (flag)
1840 bitmap_set_bit (changed, c->lhs.var);
1841 }
1842 }
1843
1844 /* Initialize a new SCC info structure with SIZE elements.  */
1845
1846 scc_info::scc_info (size_t size) :
1847 visited (size), deleted (size), current_index (0), scc_stack (1)
1848 {
1849 bitmap_clear (visited);
1850 bitmap_clear (deleted);
1851 node_mapping = XNEWVEC (unsigned int, size);
1852 dfs = XCNEWVEC (unsigned int, size);
1853
1854 for (size_t i = 0; i < size; i++)
1855 node_mapping[i] = i;
1856 }
1857
1858 /* Free an SCC info structure.  */
1859
1860 scc_info::~scc_info ()
1861 {
1862 free (node_mapping);
1863 free (dfs);
1864 }
1865
1866
1867 /* Find the indirect cycles that occur in GRAPH, using strongly
1868    connected components, and note them in the indirect cycles map.
1869
1870 This technique comes from Ben Hardekopf and Calvin Lin,
1871 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1872 Lines of Code", submitted to PLDI 2007. */
1873
1874 static void
1875 find_indirect_cycles (constraint_graph_t graph)
1876 {
1877 unsigned int i;
1878 unsigned int size = graph->size;
1879 scc_info si (size);
1880
1881 for (i = 0; i < MIN (LAST_REF_NODE, size); i++)
1882 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1883 scc_visit (graph, &si, i);
1884 }
1885
1886 /* Compute a topological ordering for GRAPH, and store the result in the
1887 topo_info structure TI. */
1888
1889 static void
1890 compute_topo_order (constraint_graph_t graph,
1891 struct topo_info *ti)
1892 {
1893 unsigned int i;
1894 unsigned int size = graph->size;
1895
1896 for (i = 0; i != size; ++i)
1897 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1898 topo_visit (graph, ti, i);
1899 }
1900
1901 /* Structure used for hash value numbering of pointer equivalence
1902 classes. */
1903
1904 typedef struct equiv_class_label
1905 {
1906 hashval_t hashcode;
1907 unsigned int equivalence_class;
1908 bitmap labels;
1909 } *equiv_class_label_t;
1910 typedef const struct equiv_class_label *const_equiv_class_label_t;
1911
1912 /* Equiv_class_label hashtable helpers. */
1913
1914 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1915 {
1916 static inline hashval_t hash (const equiv_class_label *);
1917 static inline bool equal (const equiv_class_label *,
1918 const equiv_class_label *);
1919 };
1920
1921 /* Hash function for an equiv_class_label_t. */
1922
1923 inline hashval_t
1924 equiv_class_hasher::hash (const equiv_class_label *ecl)
1925 {
1926 return ecl->hashcode;
1927 }
1928
1929 /* Equality function for two equiv_class_label_t's. */
1930
1931 inline bool
1932 equiv_class_hasher::equal (const equiv_class_label *eql1,
1933 const equiv_class_label *eql2)
1934 {
1935 return (eql1->hashcode == eql2->hashcode
1936 && bitmap_equal_p (eql1->labels, eql2->labels));
1937 }
1938
1939 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1940 classes. */
1941 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1942
1943 /* A hashtable for mapping a bitmap of labels->location equivalence
1944 classes. */
1945 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1946
1947 /* Look up the equivalence class for the bitmap of LABELS in TABLE,
1948 inserting a fresh entry with equivalence class zero if LABELS is
1949 not yet present. Return the (possibly new) entry. */
1950
1951 static equiv_class_label *
1952 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1953 bitmap labels)
1954 {
1955 equiv_class_label **slot;
1956 equiv_class_label ecl;
1957
1958 ecl.labels = labels;
1959 ecl.hashcode = bitmap_hash (labels);
1960 slot = table->find_slot (&ecl, INSERT);
1961 if (!*slot)
1962 {
1963 *slot = XNEW (struct equiv_class_label);
1964 (*slot)->labels = labels;
1965 (*slot)->hashcode = ecl.hashcode;
1966 (*slot)->equivalence_class = 0;
1967 }
1968
1969 return *slot;
1970 }
1971
1972 /* Perform offline variable substitution.
1973
1974 This is a worst case quadratic time way of identifying variables
1975 that must have equivalent points-to sets, including those caused by
1976 static cycles, and single entry subgraphs, in the constraint graph.
1977
1978 The technique is described in "Exploiting Pointer and Location
1979 Equivalence to Optimize Pointer Analysis" (14th International
1980 Static Analysis Symposium (SAS), August 2007). It is known as the
1981 "HU" algorithm, and is equivalent to value numbering the collapsed
1982 constraint graph including evaluating unions.
1983
1984 The general method of finding equivalence classes is as follows:
1985 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1986 Initialize all non-REF nodes to be direct nodes.
1987 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1988 variable}.
1989 For each constraint containing a dereference, we do the same
1990 thing.
1991
1992 We then compute SCC's in the graph and unify nodes in the same SCC,
1993 including pts sets.
1994
1995 For each non-collapsed node x:
1996 Visit all unvisited explicit incoming edges.
1997 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1998 where y->x.
1999 Lookup the equivalence class for pts(x).
2000 If we found one, equivalence_class(x) = found class.
2001 Otherwise, equivalence_class(x) = new class, and new_class is
2002 added to the lookup table.
2003
2004 All direct nodes with the same equivalence class can be replaced
2005 with a single representative node.
2006 All unlabeled nodes (label == 0) are not pointers and all edges
2007 involving them can be eliminated.
2008 We perform these optimizations during rewrite_constraints.
2009
2010 In addition to pointer equivalence class finding, we also perform
2011 location equivalence class finding. This is the set of variables
2012 that always appear together in points-to sets. We use this to
2013 compress the size of the points-to sets. */
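
/* A small illustrative example (not produced by any code here): for
   the constraints
     a = &x;  b = a;  c = a;
   the labeling below computes identical value numbers for a, b and c
   because their points-to sets are provably equal, so b and c can be
   collapsed into a's equivalence class, while a variable that is
   never assigned anything pointer-like keeps label 0 and has all its
   edges eliminated.  */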
2014
2015 /* Current maximum pointer equivalence class id. */
2016 static int pointer_equiv_class;
2017
2018 /* Current maximum location equivalence class id. */
2019 static int location_equiv_class;
2020
2021 /* Recursive routine to find strongly connected components in GRAPH,
2022 and label its nodes with DFS numbers. */
2023
2024 static void
2025 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2026 {
2027 unsigned int i;
2028 bitmap_iterator bi;
2029 unsigned int my_dfs;
2030
2031 gcc_checking_assert (si->node_mapping[n] == n);
2032 bitmap_set_bit (si->visited, n);
2033 si->dfs[n] = si->current_index++;
2034 my_dfs = si->dfs[n];
2035
2036 /* Visit all the explicit predecessors. */
2037 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2038 {
2039 unsigned int w = si->node_mapping[i];
2040
2041 if (bitmap_bit_p (si->deleted, w))
2042 continue;
2043
2044 if (!bitmap_bit_p (si->visited, w))
2045 condense_visit (graph, si, w);
2046
2047 unsigned int t = si->node_mapping[w];
2048 gcc_checking_assert (si->node_mapping[n] == n);
2049 if (si->dfs[t] < si->dfs[n])
2050 si->dfs[n] = si->dfs[t];
2051 }
2052
2053 /* Visit all the implicit predecessors. */
2054 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2055 {
2056 unsigned int w = si->node_mapping[i];
2057
2058 if (bitmap_bit_p (si->deleted, w))
2059 continue;
2060
2061 if (!bitmap_bit_p (si->visited, w))
2062 condense_visit (graph, si, w);
2063
2064 unsigned int t = si->node_mapping[w];
2065 gcc_assert (si->node_mapping[n] == n);
2066 if (si->dfs[t] < si->dfs[n])
2067 si->dfs[n] = si->dfs[t];
2068 }
2069
2070 /* See if any components have been identified. */
2071 if (si->dfs[n] == my_dfs)
2072 {
2073 while (si->scc_stack.length () != 0
2074 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2075 {
2076 unsigned int w = si->scc_stack.pop ();
2077 si->node_mapping[w] = n;
2078
2079 if (!bitmap_bit_p (graph->direct_nodes, w))
2080 bitmap_clear_bit (graph->direct_nodes, n);
2081
2082 /* Unify our nodes. */
2083 if (graph->preds[w])
2084 {
2085 if (!graph->preds[n])
2086 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2087 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2088 }
2089 if (graph->implicit_preds[w])
2090 {
2091 if (!graph->implicit_preds[n])
2092 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2093 bitmap_ior_into (graph->implicit_preds[n],
2094 graph->implicit_preds[w]);
2095 }
2096 if (graph->points_to[w])
2097 {
2098 if (!graph->points_to[n])
2099 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2100 bitmap_ior_into (graph->points_to[n],
2101 graph->points_to[w]);
2102 }
2103 }
2104 bitmap_set_bit (si->deleted, n);
2105 }
2106 else
2107 si->scc_stack.safe_push (n);
2108 }
2109
2110 /* Label pointer equivalences.
2111
2112 This performs a value numbering of the constraint graph to
2113 discover which variables will always have the same points-to sets
2114 under the current set of constraints.
2115
2116 The way it value numbers is to store the set of points-to bits
2117 generated by the constraints and graph edges. This is just used as a
2118 hash and equality comparison. The *actual set of points-to bits* is
2119 completely irrelevant, in that we don't care about being able to
2120 extract them later.
2121
2122 The equality values (currently bitmaps) just have to satisfy a few
2123 constraints, the main ones being:
2124 1. The combining operation must be order independent.
2125 2. The end result of a given set of operations must be unique iff the
2126 combination of input values is unique.
2127 3. The values must be hashable. */
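
/* For instance (illustrative), bitmap union satisfies these
   constraints: it is commutative and associative, so combining the
   predecessor sets {1, 3} and {2} in either order yields {1, 2, 3},
   and two nodes receive the same label iff their final bitmaps
   compare equal under bitmap_equal_p, which is exactly the lookup
   that equiv_class_lookup_or_add performs.  */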
2128
2129 static void
2130 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2131 {
2132 unsigned int i, first_pred;
2133 bitmap_iterator bi;
2134
2135 bitmap_set_bit (si->visited, n);
2136
2137 /* Label and union our incoming edges' points-to sets. */
2138 first_pred = -1U;
2139 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2140 {
2141 unsigned int w = si->node_mapping[i];
2142 if (!bitmap_bit_p (si->visited, w))
2143 label_visit (graph, si, w);
2144
2145 /* Skip unused edges. */
2146 if (w == n || graph->pointer_label[w] == 0)
2147 continue;
2148
2149 if (graph->points_to[w])
2150 {
2151 if (!graph->points_to[n])
2152 {
2153 if (first_pred == -1U)
2154 first_pred = w;
2155 else
2156 {
2157 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2158 bitmap_ior (graph->points_to[n],
2159 graph->points_to[first_pred],
2160 graph->points_to[w]);
2161 }
2162 }
2163 else
2164 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2165 }
2166 }
2167
2168 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2169 if (!bitmap_bit_p (graph->direct_nodes, n))
2170 {
2171 if (!graph->points_to[n])
2172 {
2173 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2174 if (first_pred != -1U)
2175 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2176 }
2177 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2178 graph->pointer_label[n] = pointer_equiv_class++;
2179 equiv_class_label_t ecl;
2180 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2181 graph->points_to[n]);
2182 ecl->equivalence_class = graph->pointer_label[n];
2183 return;
2184 }
2185
2186 /* If there was only a single non-empty predecessor, the pointer equiv
2187 class is the same. */
2188 if (!graph->points_to[n])
2189 {
2190 if (first_pred != -1U)
2191 {
2192 graph->pointer_label[n] = graph->pointer_label[first_pred];
2193 graph->points_to[n] = graph->points_to[first_pred];
2194 }
2195 return;
2196 }
2197
2198 if (!bitmap_empty_p (graph->points_to[n]))
2199 {
2200 equiv_class_label_t ecl;
2201 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2202 graph->points_to[n]);
2203 if (ecl->equivalence_class == 0)
2204 ecl->equivalence_class = pointer_equiv_class++;
2205 else
2206 {
2207 BITMAP_FREE (graph->points_to[n]);
2208 graph->points_to[n] = ecl->labels;
2209 }
2210 graph->pointer_label[n] = ecl->equivalence_class;
2211 }
2212 }
2213
2214 /* Print the pred graph in dot format. */
2215
2216 static void
2217 dump_pred_graph (struct scc_info *si, FILE *file)
2218 {
2219 unsigned int i;
2220
2221 /* Only print the graph if it has already been initialized. */
2222 if (!graph)
2223 return;
2224
2225 /* Print the header of the dot file. */
2226 fprintf (file, "strict digraph {\n");
2227 fprintf (file, " node [\n shape = box\n ]\n");
2228 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2229 fprintf (file, "\n // List of nodes and complex constraints in "
2230 "the constraint graph:\n");
2231
2232 /* The next lines print the nodes in the graph together with the
2233 complex constraints attached to them. */
2234 for (i = 1; i < graph->size; i++)
2235 {
2236 if (i == FIRST_REF_NODE)
2237 continue;
2238 if (si->node_mapping[i] != i)
2239 continue;
2240 if (i < FIRST_REF_NODE)
2241 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2242 else
2243 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2244 if (graph->points_to[i]
2245 && !bitmap_empty_p (graph->points_to[i]))
2246 {
2247 if (i < FIRST_REF_NODE)
2248 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2249 else
2250 fprintf (file, "[label=\"*%s = {",
2251 get_varinfo (i - FIRST_REF_NODE)->name);
2252 unsigned j;
2253 bitmap_iterator bi;
2254 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2255 fprintf (file, " %d", j);
2256 fprintf (file, " }\"]");
2257 }
2258 fprintf (file, ";\n");
2259 }
2260
2261 /* Go over the edges. */
2262 fprintf (file, "\n // Edges in the constraint graph:\n");
2263 for (i = 1; i < graph->size; i++)
2264 {
2265 unsigned j;
2266 bitmap_iterator bi;
2267 if (si->node_mapping[i] != i)
2268 continue;
2269 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2270 {
2271 unsigned from = si->node_mapping[j];
2272 if (from < FIRST_REF_NODE)
2273 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2274 else
2275 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2276 fprintf (file, " -> ");
2277 if (i < FIRST_REF_NODE)
2278 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2279 else
2280 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2281 fprintf (file, ";\n");
2282 }
2283 }
2284
2285 /* Print the tail of the dot file. */
2286 fprintf (file, "}\n");
2287 }
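
/* A fragment of the resulting dump looks roughly like the following
   (variable names are hypothetical); starred names denote the REF
   nodes introduced for dereferences:

     strict digraph {
       "p"[label="p = { 7 8 }"];
       "q";
       "q" -> "p";
       "*p" -> "q";
     }  */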
2288
2289 /* Perform offline variable substitution, discovering equivalence
2290 classes, and eliminating non-pointer variables. */
2291
2292 static struct scc_info *
2293 perform_var_substitution (constraint_graph_t graph)
2294 {
2295 unsigned int i;
2296 unsigned int size = graph->size;
2297 scc_info *si = new scc_info (size);
2298
2299 bitmap_obstack_initialize (&iteration_obstack);
2300 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2301 location_equiv_class_table
2302 = new hash_table<equiv_class_hasher> (511);
2303 pointer_equiv_class = 1;
2304 location_equiv_class = 1;
2305
2306 /* Condense the nodes, which means to find SCC's, count incoming
2307 predecessors, and unite nodes in SCC's. */
2308 for (i = 1; i < FIRST_REF_NODE; i++)
2309 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2310 condense_visit (graph, si, si->node_mapping[i]);
2311
2312 if (dump_file && (dump_flags & TDF_GRAPH))
2313 {
2314 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2315 "in dot format:\n");
2316 dump_pred_graph (si, dump_file);
2317 fprintf (dump_file, "\n\n");
2318 }
2319
2320 bitmap_clear (si->visited);
2321 /* Actually label the nodes for pointer equivalences. */
2322 for (i = 1; i < FIRST_REF_NODE; i++)
2323 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2324 label_visit (graph, si, si->node_mapping[i]);
2325
2326 /* Calculate location equivalence labels. */
2327 for (i = 1; i < FIRST_REF_NODE; i++)
2328 {
2329 bitmap pointed_by;
2330 bitmap_iterator bi;
2331 unsigned int j;
2332
2333 if (!graph->pointed_by[i])
2334 continue;
2335 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2336
2337 /* Translate the pointed-by mapping for pointer equivalence
2338 labels. */
2339 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2340 {
2341 bitmap_set_bit (pointed_by,
2342 graph->pointer_label[si->node_mapping[j]]);
2343 }
2344 /* The original pointed_by is now dead. */
2345 BITMAP_FREE (graph->pointed_by[i]);
2346
2347 /* Look up the location equivalence label if one exists, or make
2348 one otherwise. */
2349 equiv_class_label_t ecl;
2350 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2351 if (ecl->equivalence_class == 0)
2352 ecl->equivalence_class = location_equiv_class++;
2353 else
2354 {
2355 if (dump_file && (dump_flags & TDF_DETAILS))
2356 fprintf (dump_file, "Found location equivalence for node %s\n",
2357 get_varinfo (i)->name);
2358 BITMAP_FREE (pointed_by);
2359 }
2360 graph->loc_label[i] = ecl->equivalence_class;
2361
2362 }
2363
2364 if (dump_file && (dump_flags & TDF_DETAILS))
2365 for (i = 1; i < FIRST_REF_NODE; i++)
2366 {
2367 unsigned j = si->node_mapping[i];
2368 if (j != i)
2369 {
2370 fprintf (dump_file, "%s node id %d ",
2371 bitmap_bit_p (graph->direct_nodes, i)
2372 ? "Direct" : "Indirect", i);
2373 if (i < FIRST_REF_NODE)
2374 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2375 else
2376 fprintf (dump_file, "\"*%s\"",
2377 get_varinfo (i - FIRST_REF_NODE)->name);
2378 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2379 if (j < FIRST_REF_NODE)
2380 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2381 else
2382 fprintf (dump_file, "\"*%s\"\n",
2383 get_varinfo (j - FIRST_REF_NODE)->name);
2384 }
2385 else
2386 {
2387 fprintf (dump_file,
2388 "Equivalence classes for %s node id %d ",
2389 bitmap_bit_p (graph->direct_nodes, i)
2390 ? "direct" : "indirect", i);
2391 if (i < FIRST_REF_NODE)
2392 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2393 else
2394 fprintf (dump_file, "\"*%s\"",
2395 get_varinfo (i - FIRST_REF_NODE)->name);
2396 fprintf (dump_file,
2397 ": pointer %d, location %d\n",
2398 graph->pointer_label[i], graph->loc_label[i]);
2399 }
2400 }
2401
2402 /* Quickly eliminate our non-pointer variables. */
2403
2404 for (i = 1; i < FIRST_REF_NODE; i++)
2405 {
2406 unsigned int node = si->node_mapping[i];
2407
2408 if (graph->pointer_label[node] == 0)
2409 {
2410 if (dump_file && (dump_flags & TDF_DETAILS))
2411 fprintf (dump_file,
2412 "%s is a non-pointer variable, eliminating edges.\n",
2413 get_varinfo (node)->name);
2414 stats.nonpointer_vars++;
2415 clear_edges_for_node (graph, node);
2416 }
2417 }
2418
2419 return si;
2420 }
2421
2422 /* Free information that was only necessary for variable
2423 substitution. */
2424
2425 static void
2426 free_var_substitution_info (struct scc_info *si)
2427 {
2428 delete si;
2429 free (graph->pointer_label);
2430 free (graph->loc_label);
2431 free (graph->pointed_by);
2432 free (graph->points_to);
2433 free (graph->eq_rep);
2434 sbitmap_free (graph->direct_nodes);
2435 delete pointer_equiv_class_table;
2436 pointer_equiv_class_table = NULL;
2437 delete location_equiv_class_table;
2438 location_equiv_class_table = NULL;
2439 bitmap_obstack_release (&iteration_obstack);
2440 }
2441
2442 /* Return an existing node that is equivalent to NODE, which has
2443 equivalence class LABEL, if one exists. Return NODE otherwise. */
2444
2445 static unsigned int
2446 find_equivalent_node (constraint_graph_t graph,
2447 unsigned int node, unsigned int label)
2448 {
2449 /* If the address version of this variable is unused, we can
2450 substitute it for anything else with the same label.
2451 Otherwise, we know the pointers are equivalent, but not the
2452 locations, and we can unite them later. */
2453
2454 if (!bitmap_bit_p (graph->address_taken, node))
2455 {
2456 gcc_checking_assert (label < graph->size);
2457
2458 if (graph->eq_rep[label] != -1)
2459 {
2460 /* Unify the two variables since we know they are equivalent. */
2461 if (unite (graph->eq_rep[label], node))
2462 unify_nodes (graph, graph->eq_rep[label], node, false);
2463 return graph->eq_rep[label];
2464 }
2465 else
2466 {
2467 graph->eq_rep[label] = node;
2468 graph->pe_rep[label] = node;
2469 }
2470 }
2471 else
2472 {
2473 gcc_checking_assert (label < graph->size);
2474 graph->pe[node] = label;
2475 if (graph->pe_rep[label] == -1)
2476 graph->pe_rep[label] = node;
2477 }
2478
2479 return node;
2480 }
2481
2482 /* Unite pointer equivalent but not location equivalent nodes in
2483 GRAPH. This may only be performed once variable substitution is
2484 finished. */
2485
2486 static void
2487 unite_pointer_equivalences (constraint_graph_t graph)
2488 {
2489 unsigned int i;
2490
2491 /* Go through the pointer equivalences and unite them to their
2492 representative, if they aren't already. */
2493 for (i = 1; i < FIRST_REF_NODE; i++)
2494 {
2495 unsigned int label = graph->pe[i];
2496 if (label)
2497 {
2498 int label_rep = graph->pe_rep[label];
2499
2500 if (label_rep == -1)
2501 continue;
2502
2503 label_rep = find (label_rep);
2504 if (label_rep >= 0 && unite (label_rep, find (i)))
2505 unify_nodes (graph, label_rep, i, false);
2506 }
2507 }
2508 }
2509
2510 /* Move complex constraints to the GRAPH nodes they belong to. */
2511
2512 static void
2513 move_complex_constraints (constraint_graph_t graph)
2514 {
2515 int i;
2516 constraint_t c;
2517
2518 FOR_EACH_VEC_ELT (constraints, i, c)
2519 {
2520 if (c)
2521 {
2522 struct constraint_expr lhs = c->lhs;
2523 struct constraint_expr rhs = c->rhs;
2524
2525 if (lhs.type == DEREF)
2526 {
2527 insert_into_complex (graph, lhs.var, c);
2528 }
2529 else if (rhs.type == DEREF)
2530 {
2531 if (!(get_varinfo (lhs.var)->is_special_var))
2532 insert_into_complex (graph, rhs.var, c);
2533 }
2534 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2535 && (lhs.offset != 0 || rhs.offset != 0))
2536 {
2537 insert_into_complex (graph, rhs.var, c);
2538 }
2539 }
2540 }
2541 }
2542
2543
2544 /* Optimize and rewrite complex constraints while performing
2545 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2546 result of perform_var_substitution. */
2547
2548 static void
2549 rewrite_constraints (constraint_graph_t graph,
2550 struct scc_info *si)
2551 {
2552 int i;
2553 constraint_t c;
2554
2555 if (flag_checking)
2556 {
2557 for (unsigned int j = 0; j < graph->size; j++)
2558 gcc_assert (find (j) == j);
2559 }
2560
2561 FOR_EACH_VEC_ELT (constraints, i, c)
2562 {
2563 struct constraint_expr lhs = c->lhs;
2564 struct constraint_expr rhs = c->rhs;
2565 unsigned int lhsvar = find (lhs.var);
2566 unsigned int rhsvar = find (rhs.var);
2567 unsigned int lhsnode, rhsnode;
2568 unsigned int lhslabel, rhslabel;
2569
2570 lhsnode = si->node_mapping[lhsvar];
2571 rhsnode = si->node_mapping[rhsvar];
2572 lhslabel = graph->pointer_label[lhsnode];
2573 rhslabel = graph->pointer_label[rhsnode];
2574
2575 /* See if it is really a non-pointer variable, and if so, ignore
2576 the constraint. */
2577 if (lhslabel == 0)
2578 {
2579 if (dump_file && (dump_flags & TDF_DETAILS))
2580 {
2581
2582 fprintf (dump_file, "%s is a non-pointer variable, "
2583 "ignoring constraint:",
2584 get_varinfo (lhs.var)->name);
2585 dump_constraint (dump_file, c);
2586 fprintf (dump_file, "\n");
2587 }
2588 constraints[i] = NULL;
2589 continue;
2590 }
2591
2592 if (rhslabel == 0)
2593 {
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2595 {
2596
2597 fprintf (dump_file, "%s is a non-pointer variable, "
2598 "ignoring constraint:",
2599 get_varinfo (rhs.var)->name);
2600 dump_constraint (dump_file, c);
2601 fprintf (dump_file, "\n");
2602 }
2603 constraints[i] = NULL;
2604 continue;
2605 }
2606
2607 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2608 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2609 c->lhs.var = lhsvar;
2610 c->rhs.var = rhsvar;
2611 }
2612 }
2613
2614 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2615 part of an SCC, false otherwise. */
2616
2617 static bool
2618 eliminate_indirect_cycles (unsigned int node)
2619 {
2620 if (graph->indirect_cycles[node] != -1
2621 && !bitmap_empty_p (get_varinfo (node)->solution))
2622 {
2623 unsigned int i;
2624 auto_vec<unsigned> queue;
2625 int queuepos;
2626 unsigned int to = find (graph->indirect_cycles[node]);
2627 bitmap_iterator bi;
2628
2629 /* We can't touch the solution set and call unify_nodes
2630 at the same time, because unify_nodes is going to do
2631 bitmap unions into it. */
2632
2633 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2634 {
2635 if (find (i) == i && i != to)
2636 {
2637 if (unite (to, i))
2638 queue.safe_push (i);
2639 }
2640 }
2641
2642 for (queuepos = 0;
2643 queue.iterate (queuepos, &i);
2644 queuepos++)
2645 {
2646 unify_nodes (graph, to, i, true);
2647 }
2648 return true;
2649 }
2650 return false;
2651 }
2652
2653 /* Solve the constraint graph GRAPH using our worklist solver.
2654 This is based on the PW* family of solvers from the "Efficient Field
2655 Sensitive Pointer Analysis for C" paper.
2656 It works by iterating over all the graph nodes, processing the complex
2657 constraints and propagating the copy constraints, until everything stops
2658 changing. This corresponds to steps 6-8 in the solving list given above. */
2659
2660 static void
2661 solve_graph (constraint_graph_t graph)
2662 {
2663 unsigned int size = graph->size;
2664 unsigned int i;
2665 bitmap pts;
2666
2667 changed = BITMAP_ALLOC (NULL);
2668
2669 /* Mark all initial non-collapsed nodes as changed. */
2670 for (i = 1; i < size; i++)
2671 {
2672 varinfo_t ivi = get_varinfo (i);
2673 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2674 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2675 || graph->complex[i].length () > 0))
2676 bitmap_set_bit (changed, i);
2677 }
2678
2679 /* Allocate a bitmap to be used to store the changed bits. */
2680 pts = BITMAP_ALLOC (&pta_obstack);
2681
2682 while (!bitmap_empty_p (changed))
2683 {
2684 unsigned int i;
2685 struct topo_info *ti = init_topo_info ();
2686 stats.iterations++;
2687
2688 bitmap_obstack_initialize (&iteration_obstack);
2689
2690 compute_topo_order (graph, ti);
2691
2692 while (ti->topo_order.length () != 0)
2693 {
2694
2695 i = ti->topo_order.pop ();
2696
2697 /* If this variable is not a representative, skip it. */
2698 if (find (i) != i)
2699 continue;
2700
2701 /* In certain indirect cycle cases, we may merge this
2702 variable to another. */
2703 if (eliminate_indirect_cycles (i) && find (i) != i)
2704 continue;
2705
2706 /* If the node has changed, we need to process the
2707 complex constraints and outgoing edges again. */
2708 if (bitmap_clear_bit (changed, i))
2709 {
2710 unsigned int j;
2711 constraint_t c;
2712 bitmap solution;
2713 vec<constraint_t> complex = graph->complex[i];
2714 varinfo_t vi = get_varinfo (i);
2715 bool solution_empty;
2716
2717 /* Compute the changed set of solution bits. If anything
2718 is in the solution just propagate that. */
2719 if (bitmap_bit_p (vi->solution, anything_id))
2720 {
2721 /* If anything is also in the old solution there is
2722 nothing to do.
2723 ??? But we shouldn't have ended up with "changed" set ... */
2724 if (vi->oldsolution
2725 && bitmap_bit_p (vi->oldsolution, anything_id))
2726 continue;
2727 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2728 }
2729 else if (vi->oldsolution)
2730 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2731 else
2732 bitmap_copy (pts, vi->solution);
2733
2734 if (bitmap_empty_p (pts))
2735 continue;
2736
2737 if (vi->oldsolution)
2738 bitmap_ior_into (vi->oldsolution, pts);
2739 else
2740 {
2741 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2742 bitmap_copy (vi->oldsolution, pts);
2743 }
2744
2745 solution = vi->solution;
2746 solution_empty = bitmap_empty_p (solution);
2747
2748 /* Process the complex constraints. */
2749 bitmap expanded_pts = NULL;
2750 FOR_EACH_VEC_ELT (complex, j, c)
2751 {
2752 /* XXX: This is going to unsort the constraints in
2753 some cases, which will occasionally add duplicate
2754 constraints during unification. This does not
2755 affect correctness. */
2756 c->lhs.var = find (c->lhs.var);
2757 c->rhs.var = find (c->rhs.var);
2758
2759 /* The only complex constraint that can change our
2760 solution to non-empty, given an empty solution,
2761 is a constraint where the lhs side is receiving
2762 some set from elsewhere. */
2763 if (!solution_empty || c->lhs.type != DEREF)
2764 do_complex_constraint (graph, c, pts, &expanded_pts);
2765 }
2766 BITMAP_FREE (expanded_pts);
2767
2768 solution_empty = bitmap_empty_p (solution);
2769
2770 if (!solution_empty)
2771 {
2772 bitmap_iterator bi;
2773 unsigned eff_escaped_id = find (escaped_id);
2774
2775 /* Propagate solution to all successors. */
2776 unsigned to_remove = ~0U;
2777 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2778 0, j, bi)
2779 {
2780 if (to_remove != ~0U)
2781 {
2782 bitmap_clear_bit (graph->succs[i], to_remove);
2783 to_remove = ~0U;
2784 }
2785 unsigned int to = find (j);
2786 if (to != j)
2787 {
2788 /* Update the succ graph, avoiding duplicate
2789 work. */
2790 to_remove = j;
2791 if (! bitmap_set_bit (graph->succs[i], to))
2792 continue;
2793 /* We eventually end up processing 'to' twice
2794 as it is undefined whether bitmap iteration
2795 iterates over bits set during iteration.
2796 Play safe instead of doing tricks. */
2797 }
2798 /* Don't try to propagate to ourselves. */
2799 if (to == i)
2800 continue;
2801
2802 bitmap tmp = get_varinfo (to)->solution;
2803 bool flag = false;
2804
2805 /* If we propagate from ESCAPED use ESCAPED as
2806 placeholder. */
2807 if (i == eff_escaped_id)
2808 flag = bitmap_set_bit (tmp, escaped_id);
2809 else
2810 flag = bitmap_ior_into (tmp, pts);
2811
2812 if (flag)
2813 bitmap_set_bit (changed, to);
2814 }
2815 if (to_remove != ~0U)
2816 bitmap_clear_bit (graph->succs[i], to_remove);
2817 }
2818 }
2819 }
2820 free_topo_info (ti);
2821 bitmap_obstack_release (&iteration_obstack);
2822 }
2823
2824 BITMAP_FREE (pts);
2825 BITMAP_FREE (changed);
2826 bitmap_obstack_release (&oldpta_obstack);
2827 }
2828
2829 /* Map from trees to variable infos. */
2830 static hash_map<tree, varinfo_t> *vi_for_tree;
2831
2832
2833 /* Insert VI as the variable info for tree T in the vi_for_tree map. */
2834
2835 static void
2836 insert_vi_for_tree (tree t, varinfo_t vi)
2837 {
2838 gcc_assert (vi);
2839 gcc_assert (!vi_for_tree->put (t, vi));
2840 }
2841
2842 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2843 exist in the map, return NULL; otherwise return the varinfo we found. */
2844
2845 static varinfo_t
2846 lookup_vi_for_tree (tree t)
2847 {
2848 varinfo_t *slot = vi_for_tree->get (t);
2849 if (slot == NULL)
2850 return NULL;
2851
2852 return *slot;
2853 }
2854
2855 /* Return a printable name for DECL. */
2856
2857 static const char *
2858 alias_get_name (tree decl)
2859 {
2860 const char *res = "NULL";
2861 if (dump_file)
2862 {
2863 char *temp = NULL;
2864 if (TREE_CODE (decl) == SSA_NAME)
2865 {
2866 res = get_name (decl);
2867 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2868 }
2869 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2870 && DECL_ASSEMBLER_NAME_SET_P (decl))
2871 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2872 else if (DECL_P (decl))
2873 {
2874 res = get_name (decl);
2875 if (!res)
2876 temp = xasprintf ("D.%u", DECL_UID (decl));
2877 }
2878
2879 if (temp)
2880 {
2881 res = ggc_strdup (temp);
2882 free (temp);
2883 }
2884 }
2885
2886 return res;
2887 }
2888
2889 /* Find the variable info for tree T in the map.
2890 If T doesn't exist in the map, create an entry for it and return it. */
2891
2892 static varinfo_t
2893 get_vi_for_tree (tree t)
2894 {
2895 varinfo_t *slot = vi_for_tree->get (t);
2896 if (slot == NULL)
2897 {
2898 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2899 return get_varinfo (id);
2900 }
2901
2902 return *slot;
2903 }
2904
2905 /* Get a scalar constraint expression for a new temporary variable. */
2906
2907 static struct constraint_expr
2908 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2909 {
2910 struct constraint_expr tmp;
2911 varinfo_t vi;
2912
2913 vi = new_var_info (NULL_TREE, name, add_id);
2914 vi->offset = 0;
2915 vi->size = -1;
2916 vi->fullsize = -1;
2917 vi->is_full_var = 1;
2918 vi->is_reg_var = 1;
2919
2920 tmp.var = vi->id;
2921 tmp.type = SCALAR;
2922 tmp.offset = 0;
2923
2924 return tmp;
2925 }
2926
2927 /* Get a constraint expression vector from an SSA_VAR_P node.
2928 If address_p is true, the result will have its address taken. */
2929
2930 static void
2931 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2932 {
2933 struct constraint_expr cexpr;
2934 varinfo_t vi;
2935
2936 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2937 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2938
2939 if (TREE_CODE (t) == SSA_NAME
2940 && SSA_NAME_IS_DEFAULT_DEF (t))
2941 {
2942 /* For parameters, get at the points-to set for the actual parm
2943 decl. */
2944 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2945 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2946 {
2947 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2948 return;
2949 }
2950 /* For undefined SSA names return nothing. */
2951 else if (!ssa_defined_default_def_p (t))
2952 {
2953 cexpr.var = nothing_id;
2954 cexpr.type = SCALAR;
2955 cexpr.offset = 0;
2956 results->safe_push (cexpr);
2957 return;
2958 }
2959 }
2960
2961 /* For global variables resort to the alias target. */
2962 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2963 {
2964 varpool_node *node = varpool_node::get (t);
2965 if (node && node->alias && node->analyzed)
2966 {
2967 node = node->ultimate_alias_target ();
2968 /* Canonicalize the PT uid of all aliases to the ultimate target.
2969 ??? Hopefully the set of aliases can't change in a way that
2970 changes the ultimate alias target. */
2971 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2972 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2973 && (! DECL_PT_UID_SET_P (t)
2974 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2975 DECL_PT_UID (t) = DECL_UID (node->decl);
2976 t = node->decl;
2977 }
2978
2979 /* If this decl may bind to NULL, note that. */
2980 if (address_p
2981 && (! node || ! node->nonzero_address ()))
2982 {
2983 cexpr.var = nothing_id;
2984 cexpr.type = SCALAR;
2985 cexpr.offset = 0;
2986 results->safe_push (cexpr);
2987 }
2988 }
2989
2990 vi = get_vi_for_tree (t);
2991 cexpr.var = vi->id;
2992 cexpr.type = SCALAR;
2993 cexpr.offset = 0;
2994
2995 /* If we are not taking the address of the constraint expr, add all
2996 sub-fields of the variable as well. */
2997 if (!address_p
2998 && !vi->is_full_var)
2999 {
3000 for (; vi; vi = vi_next (vi))
3001 {
3002 cexpr.var = vi->id;
3003 results->safe_push (cexpr);
3004 }
3005 return;
3006 }
3007
3008 results->safe_push (cexpr);
3009 }
3010
3011 /* Process constraint T, performing various simplifications and then
3012 adding it to our list of overall constraints. */
3013
3014 static void
3015 process_constraint (constraint_t t)
3016 {
3017 struct constraint_expr rhs = t->rhs;
3018 struct constraint_expr lhs = t->lhs;
3019
3020 gcc_assert (rhs.var < varmap.length ());
3021 gcc_assert (lhs.var < varmap.length ());
3022
3023 /* If we didn't get any useful constraint from the lhs we get
3024 &ANYTHING as fallback from get_constraint_for. Deal with
3025 it here by turning it into *ANYTHING. */
3026 if (lhs.type == ADDRESSOF
3027 && lhs.var == anything_id)
3028 lhs.type = DEREF;
3029
3030 /* ADDRESSOF on the lhs is invalid. */
3031 gcc_assert (lhs.type != ADDRESSOF);
3032
3033 /* We shouldn't add constraints from things that cannot have pointers.
3034 It's not completely trivial to avoid in the callers, so do it here. */
3035 if (rhs.type != ADDRESSOF
3036 && !get_varinfo (rhs.var)->may_have_pointers)
3037 return;
3038
3039 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3040 if (!get_varinfo (lhs.var)->may_have_pointers)
3041 return;
3042
3043 /* This can happen in our IR with things like n->a = *p */
3044 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3045 {
3046 /* Split into tmp = *rhs, *lhs = tmp */
3047 struct constraint_expr tmplhs;
3048 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3049 process_constraint (new_constraint (tmplhs, rhs));
3050 process_constraint (new_constraint (lhs, tmplhs));
3051 }
3052 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3053 {
3054 /* Split into tmp = &rhs, *lhs = tmp */
3055 struct constraint_expr tmplhs;
3056 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3057 process_constraint (new_constraint (tmplhs, rhs));
3058 process_constraint (new_constraint (lhs, tmplhs));
3059 }
3060 else
3061 {
3062 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3063 constraints.safe_push (t);
3064 }
3065 }
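
/* To illustrate the splitting above (with hypothetical variables):
   for the statement *p = *q both sides are DEREFs, so we emit
     doubledereftmp = *q;  *p = doubledereftmp;
   and for *p = &a, where the rhs is an ADDRESSOF, we emit
     derefaddrtmp = &a;  *p = derefaddrtmp;
   so that every stored constraint has at most one complex side.  */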
3066
3067
3068 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3069 structure. */
3070
3071 static HOST_WIDE_INT
3072 bitpos_of_field (const tree fdecl)
3073 {
3074 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3075 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3076 return -1;
3077
3078 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3079 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3080 }
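
/* For example, given struct S { int a; int b; } with 32-bit int and
   no padding, the FIELD_DECL for b yields 32 here, regardless of how
   the position is split between DECL_FIELD_OFFSET (scaled by
   BITS_PER_UNIT) and DECL_FIELD_BIT_OFFSET.  */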
3081
3082
3083 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3084 resulting constraint expressions in *RESULTS. */
3085
3086 static void
3087 get_constraint_for_ptr_offset (tree ptr, tree offset,
3088 vec<ce_s> *results)
3089 {
3090 struct constraint_expr c;
3091 unsigned int j, n;
3092 HOST_WIDE_INT rhsoffset;
3093
3094 /* If we do not do field-sensitive PTA adding offsets to pointers
3095 does not change the points-to solution. */
3096 if (!use_field_sensitive)
3097 {
3098 get_constraint_for_rhs (ptr, results);
3099 return;
3100 }
3101
3102 /* If the offset is not an integer constant whose bit equivalent
3103 fits in a HOST_WIDE_INT, we have to fall back to a conservative
3104 solution which includes all sub-fields of all pointed-to
3105 variables of ptr. */
3106 if (offset == NULL_TREE
3107 || TREE_CODE (offset) != INTEGER_CST)
3108 rhsoffset = UNKNOWN_OFFSET;
3109 else
3110 {
3111 /* Sign-extend the offset. */
3112 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3113 if (!wi::fits_shwi_p (soffset))
3114 rhsoffset = UNKNOWN_OFFSET;
3115 else
3116 {
3117 /* Make sure the bit-offset also fits. */
3118 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3119 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3120 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3121 rhsoffset = UNKNOWN_OFFSET;
3122 }
3123 }
3124
3125 get_constraint_for_rhs (ptr, results);
3126 if (rhsoffset == 0)
3127 return;
3128
3129 /* As we are eventually appending to the solution, do not use
3130 vec::iterate here. */
3131 n = results->length ();
3132 for (j = 0; j < n; j++)
3133 {
3134 varinfo_t curr;
3135 c = (*results)[j];
3136 curr = get_varinfo (c.var);
3137
3138 if (c.type == ADDRESSOF
3139 /* If this varinfo represents a full variable just use it. */
3140 && curr->is_full_var)
3141 ;
3142 else if (c.type == ADDRESSOF
3143 /* If we do not know the offset add all subfields. */
3144 && rhsoffset == UNKNOWN_OFFSET)
3145 {
3146 varinfo_t temp = get_varinfo (curr->head);
3147 do
3148 {
3149 struct constraint_expr c2;
3150 c2.var = temp->id;
3151 c2.type = ADDRESSOF;
3152 c2.offset = 0;
3153 if (c2.var != c.var)
3154 results->safe_push (c2);
3155 temp = vi_next (temp);
3156 }
3157 while (temp);
3158 }
3159 else if (c.type == ADDRESSOF)
3160 {
3161 varinfo_t temp;
3162 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3163
3164 /* If curr->offset + rhsoffset is less than zero, adjust it. */
3165 if (rhsoffset < 0
3166 && curr->offset < offset)
3167 offset = 0;
3168
3169 /* We have to include all fields that overlap the current
3170 field shifted by rhsoffset. And we include at least
3171 the last or the first field of the variable to represent
3172 reachability of off-bound addresses (in particular &object + 1),
3173 so as to remain conservatively correct. */
3174 temp = first_or_preceding_vi_for_offset (curr, offset);
3175 c.var = temp->id;
3176 c.offset = 0;
3177 temp = vi_next (temp);
3178 while (temp
3179 && temp->offset < offset + curr->size)
3180 {
3181 struct constraint_expr c2;
3182 c2.var = temp->id;
3183 c2.type = ADDRESSOF;
3184 c2.offset = 0;
3185 results->safe_push (c2);
3186 temp = vi_next (temp);
3187 }
3188 }
3189 else if (c.type == SCALAR)
3190 {
3191 gcc_assert (c.offset == 0);
3192 c.offset = rhsoffset;
3193 }
3194 else
3195 /* We shouldn't get any DEREFs here. */
3196 gcc_unreachable ();
3197
3198 (*results)[j] = c;
3199 }
3200 }
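
/* A sketch of the above with hypothetical variables: if PTR points
   to &s, where s has subfields f0 at bit offset 0 and f1 at bit
   offset 32, then an offset of 4 bytes (rhsoffset == 32) rewrites
   the ADDRESSOF result for f0 to f1, whereas a non-constant offset
   makes rhsoffset UNKNOWN_OFFSET and adds every subfield of s to
   the result.  */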
3201
3202
3203 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3204 If address_p is true, the result will have its address taken.
3205 If lhs_p is true then the constraint expression is assumed to be used
3206 as the lhs. */
3207
3208 static void
3209 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3210 bool address_p, bool lhs_p)
3211 {
3212 tree orig_t = t;
3213 poly_int64 bitsize = -1;
3214 poly_int64 bitmaxsize = -1;
3215 poly_int64 bitpos;
3216 bool reverse;
3217 tree forzero;
3218
3219 /* Some people like to do cute things like take the address of
3220 &0->a.b */
3221 forzero = t;
3222 while (handled_component_p (forzero)
3223 || INDIRECT_REF_P (forzero)
3224 || TREE_CODE (forzero) == MEM_REF)
3225 forzero = TREE_OPERAND (forzero, 0);
3226
3227 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3228 {
3229 struct constraint_expr temp;
3230
3231 temp.offset = 0;
3232 temp.var = integer_id;
3233 temp.type = SCALAR;
3234 results->safe_push (temp);
3235 return;
3236 }
3237
3238 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3239
3240 /* We can end up here for component references on a
3241 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3242 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3243 symbolic constants simply give up. */
3244 if (TREE_CODE (t) == ADDR_EXPR)
3245 {
3246 constraint_expr result;
3247 result.type = SCALAR;
3248 result.var = anything_id;
3249 result.offset = 0;
3250 results->safe_push (result);
3251 return;
3252 }
3253
3254 /* Pretend to take the address of the base, we'll take care of
3255 adding the required subset of sub-fields below. */
3256 get_constraint_for_1 (t, results, true, lhs_p);
3257 /* Strip off nothing_id. */
3258 if (results->length () == 2)
3259 {
3260 gcc_assert ((*results)[0].var == nothing_id);
3261 results->unordered_remove (0);
3262 }
3263 gcc_assert (results->length () == 1);
3264 struct constraint_expr &result = results->last ();
3265
3266 if (result.type == SCALAR
3267 && get_varinfo (result.var)->is_full_var)
3268 /* For single-field vars do not bother about the offset. */
3269 result.offset = 0;
3270 else if (result.type == SCALAR)
3271 {
3272 /* In languages like C, you can access one past the end of an
3273 array. You aren't allowed to dereference it, so we can
3274 ignore this constraint. When we handle pointer subtraction,
3275 we may have to do something cute here. */
3276
3277 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3278 && maybe_ne (bitmaxsize, 0))
3279 {
3280 /* It's also not true that the constraint will actually start at the
3281 right offset; it may start in some padding. We only care about
3282 setting the constraint to the first actual field it touches, so
3283 walk to find it. */
3284 struct constraint_expr cexpr = result;
3285 varinfo_t curr;
3286 results->pop ();
3287 cexpr.offset = 0;
3288 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3289 {
3290 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3291 curr->size, bitpos, bitmaxsize))
3292 {
3293 cexpr.var = curr->id;
3294 results->safe_push (cexpr);
3295 if (address_p)
3296 break;
3297 }
3298 }
3299 /* If we are going to take the address of this field then
3300 to be able to compute reachability correctly add at least
3301 the last field of the variable. */
3302 if (address_p && results->length () == 0)
3303 {
3304 curr = get_varinfo (cexpr.var);
3305 while (curr->next != 0)
3306 curr = vi_next (curr);
3307 cexpr.var = curr->id;
3308 results->safe_push (cexpr);
3309 }
3310 else if (results->length () == 0)
3311 /* We would like to assert that we found *some* field there; the
3312 user couldn't be accessing *only* padding. But the user could
3313 still access one past the end of an array embedded in a struct,
3314 resulting in accessing *only* padding, or access only padding
3315 via type-punning to a type that has a field just in padding
3316 space. */
3317 {
3318 cexpr.type = SCALAR;
3319 cexpr.var = anything_id;
3320 cexpr.offset = 0;
3321 results->safe_push (cexpr);
3322 }
3323 }
3324 else if (known_eq (bitmaxsize, 0))
3325 {
3326 if (dump_file && (dump_flags & TDF_DETAILS))
3327 fprintf (dump_file, "Access to zero-sized part of variable, "
3328 "ignoring\n");
3329 }
3330 else
3331 if (dump_file && (dump_flags & TDF_DETAILS))
3332 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3333 }
3334 else if (result.type == DEREF)
3335 {
3336 /* If we do not know exactly where the access goes, say so. Note
3337 that only for non-structure accesses do we know that we access
3338 at most one subfield of any variable. */
3339 HOST_WIDE_INT const_bitpos;
3340 if (!bitpos.is_constant (&const_bitpos)
3341 || const_bitpos == -1
3342 || maybe_ne (bitsize, bitmaxsize)
3343 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3344 || result.offset == UNKNOWN_OFFSET)
3345 result.offset = UNKNOWN_OFFSET;
3346 else
3347 result.offset += const_bitpos;
3348 }
3349 else if (result.type == ADDRESSOF)
3350 {
3351 /* We can end up here for component references on constants like
3352 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3353 result.type = SCALAR;
3354 result.var = anything_id;
3355 result.offset = 0;
3356 }
3357 else
3358 gcc_unreachable ();
3359 }
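
/* For instance (illustrative): for an access s.b, where s was split
   into subfields at bit offsets 0 and 32, get_ref_base_and_extent
   returns bitpos 32, and the SCALAR result is narrowed to the
   varinfos whose [offset, offset + size) ranges overlap the accessed
   range; for a DEREF base the known constant bitpos is instead
   folded into result.offset.  */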
3360
3361
3362 /* Dereference the constraint expression CONS, and return the result.
3363 DEREF (ADDRESSOF) = SCALAR
3364 DEREF (SCALAR) = DEREF
3365 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3366 This is needed so that we can handle dereferencing DEREF constraints. */
3367
3368 static void
3369 do_deref (vec<ce_s> *constraints)
3370 {
3371 struct constraint_expr *c;
3372 unsigned int i = 0;
3373
3374 FOR_EACH_VEC_ELT (*constraints, i, c)
3375 {
3376 if (c->type == SCALAR)
3377 c->type = DEREF;
3378 else if (c->type == ADDRESSOF)
3379 c->type = SCALAR;
3380 else if (c->type == DEREF)
3381 {
3382 struct constraint_expr tmplhs;
3383 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3384 process_constraint (new_constraint (tmplhs, *c));
3385 c->var = tmplhs.var;
3386 }
3387 else
3388 gcc_unreachable ();
3389 }
3390 }
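
/* For example (sketch): dereferencing the result for **p happens in
   two steps; the inner *p is already a DEREF, so we materialize
     dereftmp = *p;
   and the rewritten expression becomes *dereftmp, matching the
   DEREF (DEREF) rule in the comment above.  */
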
3391
3392 /* Given a tree T, return the constraint expression for taking the
3393 address of it. */
3394
3395 static void
3396 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3397 {
3398 struct constraint_expr *c;
3399 unsigned int i;
3400
3401 get_constraint_for_1 (t, results, true, true);
3402
3403 FOR_EACH_VEC_ELT (*results, i, c)
3404 {
3405 if (c->type == DEREF)
3406 c->type = SCALAR;
3407 else
3408 c->type = ADDRESSOF;
3409 }
3410 }
3411
3412 /* Given a tree T, return the constraint expression for it. */
3413
3414 static void
3415 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3416 bool lhs_p)
3417 {
3418 struct constraint_expr temp;
3419
3420 /* x = integer is all glommed to a single variable, which doesn't
3421 point to anything by itself. That is, of course, unless it is an
3422 integer constant being treated as a pointer, in which case, we
3423 will return that this is really the addressof anything. This
3424 happens below, since it will fall into the default case. The only
3425 case we know something about an integer treated like a pointer is
3426 when it is the NULL pointer, and then we just say it points to
3427 NULL.
3428
3429 Do not do that if -fno-delete-null-pointer-checks though, because
3430 in that case *NULL does not fail, so it _should_ alias *anything.
3431 It is not worth adding a new option or renaming the existing one,
3432 since this case is relatively obscure. */
3433 if ((TREE_CODE (t) == INTEGER_CST
3434 && integer_zerop (t))
3435 /* The only valid CONSTRUCTORs in gimple with pointer typed
3436 elements are zero-initializers. But in IPA mode we also
3437 process global initializers, so verify at least. */
3438 || (TREE_CODE (t) == CONSTRUCTOR
3439 && CONSTRUCTOR_NELTS (t) == 0))
3440 {
3441 if (flag_delete_null_pointer_checks)
3442 temp.var = nothing_id;
3443 else
3444 temp.var = nonlocal_id;
3445 temp.type = ADDRESSOF;
3446 temp.offset = 0;
3447 results->safe_push (temp);
3448 return;
3449 }
3450
3451 /* String constants are read-only, ideally we'd have a CONST_DECL
3452 for those. */
3453 if (TREE_CODE (t) == STRING_CST)
3454 {
3455 temp.var = string_id;
3456 temp.type = SCALAR;
3457 temp.offset = 0;
3458 results->safe_push (temp);
3459 return;
3460 }
3461
3462 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3463 {
3464 case tcc_expression:
3465 {
3466 switch (TREE_CODE (t))
3467 {
3468 case ADDR_EXPR:
3469 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3470 return;
3471 default:;
3472 }
3473 break;
3474 }
3475 case tcc_reference:
3476 {
3477 switch (TREE_CODE (t))
3478 {
3479 case MEM_REF:
3480 {
3481 struct constraint_expr cs;
3482 varinfo_t vi, curr;
3483 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3484 TREE_OPERAND (t, 1), results);
3485 do_deref (results);
3486
3487 /* If we are not taking the address then make sure to process
3488 all subvariables we might access. */
3489 if (address_p)
3490 return;
3491
3492 cs = results->last ();
3493 if (cs.type == DEREF
3494 && type_can_have_subvars (TREE_TYPE (t)))
3495 {
3496 /* For dereferences this means we have to defer it
3497 to solving time. */
3498 results->last ().offset = UNKNOWN_OFFSET;
3499 return;
3500 }
3501 if (cs.type != SCALAR)
3502 return;
3503
3504 vi = get_varinfo (cs.var);
3505 curr = vi_next (vi);
3506 if (!vi->is_full_var
3507 && curr)
3508 {
3509 unsigned HOST_WIDE_INT size;
3510 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3511 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3512 else
3513 size = -1;
3514 for (; curr; curr = vi_next (curr))
3515 {
3516 if (curr->offset - vi->offset < size)
3517 {
3518 cs.var = curr->id;
3519 results->safe_push (cs);
3520 }
3521 else
3522 break;
3523 }
3524 }
3525 return;
3526 }
3527 case ARRAY_REF:
3528 case ARRAY_RANGE_REF:
3529 case COMPONENT_REF:
3530 case IMAGPART_EXPR:
3531 case REALPART_EXPR:
3532 case BIT_FIELD_REF:
3533 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3534 return;
3535 case VIEW_CONVERT_EXPR:
3536 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3537 lhs_p);
3538 return;
3539 /* We are missing handling for TARGET_MEM_REF here. */
3540 default:;
3541 }
3542 break;
3543 }
3544 case tcc_exceptional:
3545 {
3546 switch (TREE_CODE (t))
3547 {
3548 case SSA_NAME:
3549 {
3550 get_constraint_for_ssa_var (t, results, address_p);
3551 return;
3552 }
3553 case CONSTRUCTOR:
3554 {
3555 unsigned int i;
3556 tree val;
3557 auto_vec<ce_s> tmp;
3558 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3559 {
3560 struct constraint_expr *rhsp;
3561 unsigned j;
3562 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3563 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3564 results->safe_push (*rhsp);
3565 tmp.truncate (0);
3566 }
3567 /* We do not know whether the constructor was complete,
3568 so technically we have to add &NOTHING or &ANYTHING
3569 like we do for an empty constructor as well. */
3570 return;
3571 }
3572 default:;
3573 }
3574 break;
3575 }
3576 case tcc_declaration:
3577 {
3578 get_constraint_for_ssa_var (t, results, address_p);
3579 return;
3580 }
3581 case tcc_constant:
3582 {
3583 /* We cannot refer to automatic variables through constants. */
3584 temp.type = ADDRESSOF;
3585 temp.var = nonlocal_id;
3586 temp.offset = 0;
3587 results->safe_push (temp);
3588 return;
3589 }
3590 default:;
3591 }
3592
3593 /* The default fallback is a constraint from anything. */
3594 temp.type = ADDRESSOF;
3595 temp.var = anything_id;
3596 temp.offset = 0;
3597 results->safe_push (temp);
3598 }
3599
3600 /* Given a gimple tree T, return the constraint expression vector for it. */
3601
3602 static void
3603 get_constraint_for (tree t, vec<ce_s> *results)
3604 {
3605 gcc_assert (results->length () == 0);
3606
3607 get_constraint_for_1 (t, results, false, true);
3608 }
3609
3610 /* Given a gimple tree T, return the constraint expression vector for it
3611 to be used as the rhs of a constraint. */
3612
3613 static void
3614 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3615 {
3616 gcc_assert (results->length () == 0);
3617
3618 get_constraint_for_1 (t, results, false, false);
3619 }
3620
3621
3622 /* Efficiently generate constraints from all entries in *RHSC to all
3623 entries in *LHSC. */
3624
3625 static void
3626 process_all_all_constraints (vec<ce_s> lhsc,
3627 vec<ce_s> rhsc)
3628 {
3629 struct constraint_expr *lhsp, *rhsp;
3630 unsigned i, j;
3631
3632 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3633 {
3634 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3635 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3636 process_constraint (new_constraint (*lhsp, *rhsp));
3637 }
3638 else
3639 {
3640 struct constraint_expr tmp;
3641 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3642 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3643 process_constraint (new_constraint (tmp, *rhsp));
3644 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3645 process_constraint (new_constraint (*lhsp, tmp));
3646 }
3647 }
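
/* To see why the temporary pays off (illustrative): with
   lhsc = {a, b} and rhsc = {c, d} the direct expansion needs the
   four constraints a = c, a = d, b = c and b = d, whereas routing
   through the temporary needs only
     allalltmp = c;  allalltmp = d;  a = allalltmp;  b = allalltmp;
   i.e. length (lhsc) + length (rhsc) constraints instead of their
   product.  */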
3648
3649 /* Handle aggregate copies by expanding into copies of the respective
3650 fields of the structures. */
3651
3652 static void
3653 do_structure_copy (tree lhsop, tree rhsop)
3654 {
3655 struct constraint_expr *lhsp, *rhsp;
3656 auto_vec<ce_s> lhsc;
3657 auto_vec<ce_s> rhsc;
3658 unsigned j;
3659
3660 get_constraint_for (lhsop, &lhsc);
3661 get_constraint_for_rhs (rhsop, &rhsc);
3662 lhsp = &lhsc[0];
3663 rhsp = &rhsc[0];
3664 if (lhsp->type == DEREF
3665 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3666 || rhsp->type == DEREF)
3667 {
3668 if (lhsp->type == DEREF)
3669 {
3670 gcc_assert (lhsc.length () == 1);
3671 lhsp->offset = UNKNOWN_OFFSET;
3672 }
3673 if (rhsp->type == DEREF)
3674 {
3675 gcc_assert (rhsc.length () == 1);
3676 rhsp->offset = UNKNOWN_OFFSET;
3677 }
3678 process_all_all_constraints (lhsc, rhsc);
3679 }
3680 else if (lhsp->type == SCALAR
3681 && (rhsp->type == SCALAR
3682 || rhsp->type == ADDRESSOF))
3683 {
3684 HOST_WIDE_INT lhssize, lhsoffset;
3685 HOST_WIDE_INT rhssize, rhsoffset;
3686 bool reverse;
3687 unsigned k = 0;
3688 if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
3689 || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
3690 &reverse))
3691 {
3692 process_all_all_constraints (lhsc, rhsc);
3693 return;
3694 }
3695 for (j = 0; lhsc.iterate (j, &lhsp);)
3696 {
3697 varinfo_t lhsv, rhsv;
3698 rhsp = &rhsc[k];
3699 lhsv = get_varinfo (lhsp->var);
3700 rhsv = get_varinfo (rhsp->var);
3701 if (lhsv->may_have_pointers
3702 && (lhsv->is_full_var
3703 || rhsv->is_full_var
3704 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3705 rhsv->offset + lhsoffset, rhsv->size)))
3706 process_constraint (new_constraint (*lhsp, *rhsp));
3707 if (!rhsv->is_full_var
3708 && (lhsv->is_full_var
3709 || (lhsv->offset + rhsoffset + lhsv->size
3710 > rhsv->offset + lhsoffset + rhsv->size)))
3711 {
3712 ++k;
3713 if (k >= rhsc.length ())
3714 break;
3715 }
3716 else
3717 ++j;
3718 }
3719 }
3720 else
3721 gcc_unreachable ();
3722 }
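
/* An illustrative sketch: for a copy d = s where both sides are
   SCALAR and field-sensitive, the loop above walks the two field
   lists in lock step and emits d.f = s.f only for fields whose bit
   ranges overlap, so struct S { int *p; int i; } produces the single
   constraint d.p = s.p; the non-pointer field i is skipped via the
   may_have_pointers check.  */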
3723
3724 /* Create constraints ID = { rhsc }. */
3725
3726 static void
3727 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3728 {
3729 struct constraint_expr *c;
3730 struct constraint_expr includes;
3731 unsigned int j;
3732
3733 includes.var = id;
3734 includes.offset = 0;
3735 includes.type = SCALAR;
3736
3737 FOR_EACH_VEC_ELT (rhsc, j, c)
3738 process_constraint (new_constraint (includes, *c));
3739 }
3740
3741 /* Create a constraint ID = OP. */
3742
3743 static void
3744 make_constraint_to (unsigned id, tree op)
3745 {
3746 auto_vec<ce_s> rhsc;
3747 get_constraint_for_rhs (op, &rhsc);
3748 make_constraints_to (id, rhsc);
3749 }
3750
3751 /* Create a constraint ID = &FROM. */
3752
3753 static void
3754 make_constraint_from (varinfo_t vi, int from)
3755 {
3756 struct constraint_expr lhs, rhs;
3757
3758 lhs.var = vi->id;
3759 lhs.offset = 0;
3760 lhs.type = SCALAR;
3761
3762 rhs.var = from;
3763 rhs.offset = 0;
3764 rhs.type = ADDRESSOF;
3765 process_constraint (new_constraint (lhs, rhs));
3766 }
3767
3768 /* Create a constraint ID = FROM. */
3769
3770 static void
3771 make_copy_constraint (varinfo_t vi, int from)
3772 {
3773 struct constraint_expr lhs, rhs;
3774
3775 lhs.var = vi->id;
3776 lhs.offset = 0;
3777 lhs.type = SCALAR;
3778
3779 rhs.var = from;
3780 rhs.offset = 0;
3781 rhs.type = SCALAR;
3782 process_constraint (new_constraint (lhs, rhs));
3783 }
3784
3785 /* Make constraints necessary to make OP escape. */
3786
3787 static void
3788 make_escape_constraint (tree op)
3789 {
3790 make_constraint_to (escaped_id, op);
3791 }
3792
3793 /* Add constraints so that the solution of VI is transitively closed. */
3794
3795 static void
3796 make_transitive_closure_constraints (varinfo_t vi)
3797 {
3798 struct constraint_expr lhs, rhs;
3799
3800 /* VAR = *(VAR + UNKNOWN); */
3801 lhs.type = SCALAR;
3802 lhs.var = vi->id;
3803 lhs.offset = 0;
3804 rhs.type = DEREF;
3805 rhs.var = vi->id;
3806 rhs.offset = UNKNOWN_OFFSET;
3807 process_constraint (new_constraint (lhs, rhs));
3808 }
3809
3810 /* Add constraints so that the solution of VI has all subvariables added. */
3811
3812 static void
3813 make_any_offset_constraints (varinfo_t vi)
3814 {
3815 struct constraint_expr lhs, rhs;
3816
3817 /* VAR = VAR + UNKNOWN; */
3818 lhs.type = SCALAR;
3819 lhs.var = vi->id;
3820 lhs.offset = 0;
3821 rhs.type = SCALAR;
3822 rhs.var = vi->id;
3823 rhs.offset = UNKNOWN_OFFSET;
3824 process_constraint (new_constraint (lhs, rhs));
3825 }
3826
3827 /* Temporary storage for fake var decls. */
3828 struct obstack fake_var_decl_obstack;
3829
3830 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3831
3832 static tree
3833 build_fake_var_decl (tree type)
3834 {
3835 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3836 memset (decl, 0, sizeof (struct tree_var_decl));
3837 TREE_SET_CODE (decl, VAR_DECL);
3838 TREE_TYPE (decl) = type;
3839 DECL_UID (decl) = allocate_decl_uid ();
3840 SET_DECL_PT_UID (decl, -1);
3841 layout_decl (decl, 0);
3842 return decl;
3843 }
3844
3845 /* Create a new artificial heap variable with NAME.
3846 Return the created variable. */
3847
3848 static varinfo_t
3849 make_heapvar (const char *name, bool add_id)
3850 {
3851 varinfo_t vi;
3852 tree heapvar;
3853
3854 heapvar = build_fake_var_decl (ptr_type_node);
3855 DECL_EXTERNAL (heapvar) = 1;
3856
3857 vi = new_var_info (heapvar, name, add_id);
3858 vi->is_heap_var = true;
3859 vi->is_unknown_size_var = true;
3860 vi->offset = 0;
3861 vi->fullsize = ~0;
3862 vi->size = ~0;
3863 vi->is_full_var = true;
3864 insert_vi_for_tree (heapvar, vi);
3865
3866 return vi;
3867 }
3868
3869 /* Create a new artificial heap variable with NAME and make a
3870 constraint from it to LHS. Set flags according to a tag used
3871 for tracking restrict pointers. */
3872
3873 static varinfo_t
3874 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3875 {
3876 varinfo_t vi = make_heapvar (name, add_id);
3877 vi->is_restrict_var = 1;
3878 vi->is_global_var = 1;
3879 vi->may_have_pointers = 1;
3880 make_constraint_from (lhs, vi->id);
3881 return vi;
3882 }
3883
3884 /* Create a new artificial heap variable with NAME and make a
3885 constraint from it to LHS. Set flags according to a tag used
3886 for tracking restrict pointers and make the artificial heap
3887 point to global memory. */
3888
3889 static varinfo_t
3890 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3891 bool add_id)
3892 {
3893 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3894 make_copy_constraint (vi, nonlocal_id);
3895 return vi;
3896 }
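
/* As a sketch of how these tags are used (the name TAG is purely
   illustrative): for a restrict-qualified pointer p this creates a
   fresh heap variable TAG with p = &TAG, and the global variant adds
   TAG = NONLOCAL, so p points to a distinct object which itself may
   point to global memory. */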
3897
3898 /* In IPA mode there are varinfos for different aspects of each
3899 function designator. One for the points-to set of the return
3900 value, one for the variables that are clobbered by the function,
3901 one for its uses and one for each parameter (including a single
3902 glob for remaining variadic arguments). */
3903
3904 enum { fi_clobbers = 1, fi_uses = 2,
3905 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3906
3907 /* Get a constraint for the requested part of a function designator FI
3908 when operating in IPA mode. */
3909
3910 static struct constraint_expr
3911 get_function_part_constraint (varinfo_t fi, unsigned part)
3912 {
3913 struct constraint_expr c;
3914
3915 gcc_assert (in_ipa_mode);
3916
3917 if (fi->id == anything_id)
3918 {
3919 /* ??? We probably should have an ANYFN special variable. */
3920 c.var = anything_id;
3921 c.offset = 0;
3922 c.type = SCALAR;
3923 }
3924 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
3925 {
3926 varinfo_t ai = first_vi_for_offset (fi, part);
3927 if (ai)
3928 c.var = ai->id;
3929 else
3930 c.var = anything_id;
3931 c.offset = 0;
3932 c.type = SCALAR;
3933 }
3934 else
3935 {
3936 c.var = fi->id;
3937 c.offset = part;
3938 c.type = DEREF;
3939 }
3940
3941 return c;
3942 }
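
/* For example, requesting the fi_uses part of a known function foo
   yields a SCALAR constraint expression for the subvariable
   describing foo's uses, while for an indirect callee it yields a
   DEREF of the callee variable at offset fi_uses, to be resolved
   once the callee becomes known. */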
3943
3944 /* For non-IPA mode, generate constraints necessary for a call on the
3945 RHS. */
3946
3947 static void
3948 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3949 {
3950 struct constraint_expr rhsc;
3951 unsigned i;
3952 bool returns_uses = false;
3953
3954 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3955 {
3956 tree arg = gimple_call_arg (stmt, i);
3957 int flags = gimple_call_arg_flags (stmt, i);
3958
3959 /* If the argument is not used we can ignore it. */
3960 if (flags & EAF_UNUSED)
3961 continue;
3962
3963 /* As we compute ESCAPED context-insensitively we do not gain
3964 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3965 set. The argument would still get clobbered through the
3966 escape solution. */
3967 if ((flags & EAF_NOCLOBBER)
3968 && (flags & EAF_NOESCAPE))
3969 {
3970 varinfo_t uses = get_call_use_vi (stmt);
3971 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3972 tem->is_reg_var = true;
3973 make_constraint_to (tem->id, arg);
3974 make_any_offset_constraints (tem);
3975 if (!(flags & EAF_DIRECT))
3976 make_transitive_closure_constraints (tem);
3977 make_copy_constraint (uses, tem->id);
3978 returns_uses = true;
3979 }
3980 else if (flags & EAF_NOESCAPE)
3981 {
3982 struct constraint_expr lhs, rhs;
3983 varinfo_t uses = get_call_use_vi (stmt);
3984 varinfo_t clobbers = get_call_clobber_vi (stmt);
3985 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3986 tem->is_reg_var = true;
3987 make_constraint_to (tem->id, arg);
3988 make_any_offset_constraints (tem);
3989 if (!(flags & EAF_DIRECT))
3990 make_transitive_closure_constraints (tem);
3991 make_copy_constraint (uses, tem->id);
3992 make_copy_constraint (clobbers, tem->id);
3993 /* Add *tem = nonlocal, do not add *tem = callused as
3994 EAF_NOESCAPE parameters do not escape to other parameters
3995 and all other uses appear in NONLOCAL as well. */
3996 lhs.type = DEREF;
3997 lhs.var = tem->id;
3998 lhs.offset = 0;
3999 rhs.type = SCALAR;
4000 rhs.var = nonlocal_id;
4001 rhs.offset = 0;
4002 process_constraint (new_constraint (lhs, rhs));
4003 returns_uses = true;
4004 }
4005 else
4006 make_escape_constraint (arg);
4007 }
4008
4009 /* If we added to the call's uses solution make sure we account for
4010 pointers to it being returned. */
4011 if (returns_uses)
4012 {
4013 rhsc.var = get_call_use_vi (stmt)->id;
4014 rhsc.offset = UNKNOWN_OFFSET;
4015 rhsc.type = SCALAR;
4016 results->safe_push (rhsc);
4017 }
4018
4019 /* The static chain escapes as well. */
4020 if (gimple_call_chain (stmt))
4021 make_escape_constraint (gimple_call_chain (stmt));
4022
4023 /* And if we applied NRV the address of the return slot escapes as well. */
4024 if (gimple_call_return_slot_opt_p (stmt)
4025 && gimple_call_lhs (stmt) != NULL_TREE
4026 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4027 {
4028 auto_vec<ce_s> tmpc;
4029 struct constraint_expr lhsc, *c;
4030 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4031 lhsc.var = escaped_id;
4032 lhsc.offset = 0;
4033 lhsc.type = SCALAR;
4034 FOR_EACH_VEC_ELT (tmpc, i, c)
4035 process_constraint (new_constraint (lhsc, *c));
4036 }
4037
4038 /* Regular functions return nonlocal memory. */
4039 rhsc.var = nonlocal_id;
4040 rhsc.offset = 0;
4041 rhsc.type = SCALAR;
4042 results->safe_push (rhsc);
4043 }
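
/* As a rough illustration, for a call to an unannotated function

     x = foo (p);

   this generates ESCAPED = p (the argument escapes) and makes the
   result x include NONLOCAL; EAF_NOESCAPE/EAF_NOCLOBBER arguments
   instead feed the call's uses/clobbers solutions as above. */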
4044
4045 /* For non-IPA mode, generate constraints necessary for a call
4046 that returns a pointer and assigns it to LHS. This simply makes
4047 the LHS point to global and escaped variables. */
4048
4049 static void
4050 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
4051 tree fndecl)
4052 {
4053 auto_vec<ce_s> lhsc;
4054
4055 get_constraint_for (lhs, &lhsc);
4056 /* If the store is to a global decl make sure to
4057 add proper escape constraints. */
4058 lhs = get_base_address (lhs);
4059 if (lhs
4060 && DECL_P (lhs)
4061 && is_global_var (lhs))
4062 {
4063 struct constraint_expr tmpc;
4064 tmpc.var = escaped_id;
4065 tmpc.offset = 0;
4066 tmpc.type = SCALAR;
4067 lhsc.safe_push (tmpc);
4068 }
4069
4070 /* If the call returns an argument unmodified, override the rhs
4071 constraints. */
4072 if (flags & ERF_RETURNS_ARG
4073 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4074 {
4075 tree arg;
4076 rhsc.create (0);
4077 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4078 get_constraint_for (arg, &rhsc);
4079 process_all_all_constraints (lhsc, rhsc);
4080 rhsc.release ();
4081 }
4082 else if (flags & ERF_NOALIAS)
4083 {
4084 varinfo_t vi;
4085 struct constraint_expr tmpc;
4086 rhsc.create (0);
4087 vi = make_heapvar ("HEAP", true);
4088 /* We are marking allocated storage local; we deal with it becoming
4089 global by escaping and the setting of vars_contains_escaped_heap. */
4090 DECL_EXTERNAL (vi->decl) = 0;
4091 vi->is_global_var = 0;
4092 /* If this is not a real malloc call assume the memory was
4093 initialized and thus may point to global memory. All
4094 builtin functions with the malloc attribute behave in a sane way. */
4095 if (!fndecl
4096 || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4097 make_constraint_from (vi, nonlocal_id);
4098 tmpc.var = vi->id;
4099 tmpc.offset = 0;
4100 tmpc.type = ADDRESSOF;
4101 rhsc.safe_push (tmpc);
4102 process_all_all_constraints (lhsc, rhsc);
4103 rhsc.release ();
4104 }
4105 else
4106 process_all_all_constraints (lhsc, rhsc);
4107 }
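
/* For instance, for a malloc-style call (ERF_NOALIAS)

     p = malloc (n);

   a fresh variable HEAP is created and p = &HEAP is processed, so p
   points to storage distinct from everything else; for non-builtin
   allocators HEAP = &NONLOCAL is added, as the memory may already
   point to global memory. */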
4108
4109 /* For non-IPA mode, generate constraints necessary for a call of a
4110 const function that returns a pointer in the statement STMT. */
4111
4112 static void
4113 handle_const_call (gcall *stmt, vec<ce_s> *results)
4114 {
4115 struct constraint_expr rhsc;
4116 unsigned int k;
4117 bool need_uses = false;
4118
4119 /* Treat nested const functions the same as pure functions as far
4120 as the static chain is concerned. */
4121 if (gimple_call_chain (stmt))
4122 {
4123 varinfo_t uses = get_call_use_vi (stmt);
4124 make_constraint_to (uses->id, gimple_call_chain (stmt));
4125 need_uses = true;
4126 }
4127
4128 /* And if we applied NRV the address of the return slot escapes as well. */
4129 if (gimple_call_return_slot_opt_p (stmt)
4130 && gimple_call_lhs (stmt) != NULL_TREE
4131 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4132 {
4133 varinfo_t uses = get_call_use_vi (stmt);
4134 auto_vec<ce_s> tmpc;
4135 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4136 make_constraints_to (uses->id, tmpc);
4137 need_uses = true;
4138 }
4139
4140 if (need_uses)
4141 {
4142 varinfo_t uses = get_call_use_vi (stmt);
4143 make_any_offset_constraints (uses);
4144 make_transitive_closure_constraints (uses);
4145 rhsc.var = uses->id;
4146 rhsc.offset = 0;
4147 rhsc.type = SCALAR;
4148 results->safe_push (rhsc);
4149 }
4150
4151 /* May return offsetted arguments. */
4152 varinfo_t tem = NULL;
4153 if (gimple_call_num_args (stmt) != 0)
4154 {
4155 tem = new_var_info (NULL_TREE, "callarg", true);
4156 tem->is_reg_var = true;
4157 }
4158 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4159 {
4160 tree arg = gimple_call_arg (stmt, k);
4161 auto_vec<ce_s> argc;
4162 get_constraint_for_rhs (arg, &argc);
4163 make_constraints_to (tem->id, argc);
4164 }
4165 if (tem)
4166 {
4167 ce_s ce;
4168 ce.type = SCALAR;
4169 ce.var = tem->id;
4170 ce.offset = UNKNOWN_OFFSET;
4171 results->safe_push (ce);
4172 }
4173
4174 /* May return addresses of globals. */
4175 rhsc.var = nonlocal_id;
4176 rhsc.offset = 0;
4177 rhsc.type = ADDRESSOF;
4178 results->safe_push (rhsc);
4179 }
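
/* For a const call q = cf (p) the result may thus be any argument at
   an unknown offset, via the temporary callarg variable, or the
   address of a global: roughly q = callarg + UNKNOWN and
   q = &NONLOCAL. */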
4180
4181 /* For non-IPA mode, generate constraints necessary for a call to a
4182 pure function in statement STMT. */
4183
4184 static void
4185 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4186 {
4187 struct constraint_expr rhsc;
4188 unsigned i;
4189 varinfo_t uses = NULL;
4190
4191 /* Memory reached from pointer arguments is call-used. */
4192 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4193 {
4194 tree arg = gimple_call_arg (stmt, i);
4195 if (!uses)
4196 {
4197 uses = get_call_use_vi (stmt);
4198 make_any_offset_constraints (uses);
4199 make_transitive_closure_constraints (uses);
4200 }
4201 make_constraint_to (uses->id, arg);
4202 }
4203
4204 /* The static chain is used as well. */
4205 if (gimple_call_chain (stmt))
4206 {
4207 if (!uses)
4208 {
4209 uses = get_call_use_vi (stmt);
4210 make_any_offset_constraints (uses);
4211 make_transitive_closure_constraints (uses);
4212 }
4213 make_constraint_to (uses->id, gimple_call_chain (stmt));
4214 }
4215
4216 /* And if we applied NRV the address of the return slot is used as well. */
4217 if (gimple_call_return_slot_opt_p (stmt)
4218 && gimple_call_lhs (stmt) != NULL_TREE
4219 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4220 {
4221 if (!uses)
4222 {
4223 uses = get_call_use_vi (stmt);
4224 make_any_offset_constraints (uses);
4225 make_transitive_closure_constraints (uses);
4226 }
4227 auto_vec<ce_s> tmpc;
4228 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4229 make_constraints_to (uses->id, tmpc);
4230 }
4231
4232 /* Pure functions may return call-used and nonlocal memory. */
4233 if (uses)
4234 {
4235 rhsc.var = uses->id;
4236 rhsc.offset = 0;
4237 rhsc.type = SCALAR;
4238 results->safe_push (rhsc);
4239 }
4240 rhsc.var = nonlocal_id;
4241 rhsc.offset = 0;
4242 rhsc.type = SCALAR;
4243 results->safe_push (rhsc);
4244 }
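
/* For a pure call q = pf (p) this roughly produces CALLUSED = p
   (closed transitively and at any offset) together with
   q = CALLUSED and q = NONLOCAL: the result may point into memory
   reachable from the arguments or into nonlocal memory, but nothing
   escapes. */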
4245
4246
4247 /* Return the varinfo for the callee of CALL. */
4248
4249 static varinfo_t
4250 get_fi_for_callee (gcall *call)
4251 {
4252 tree decl, fn = gimple_call_fn (call);
4253
4254 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4255 fn = OBJ_TYPE_REF_EXPR (fn);
4256
4257 /* If we can directly resolve the function being called, do so.
4258 Otherwise, it must be some sort of indirect expression that
4259 we should still be able to handle. */
4260 decl = gimple_call_addr_fndecl (fn);
4261 if (decl)
4262 return get_vi_for_tree (decl);
4263
4264 /* If the function is anything other than an SSA name pointer we have no
4265 clue and should be getting ANYFN (well, ANYTHING for now). */
4266 if (!fn || TREE_CODE (fn) != SSA_NAME)
4267 return get_varinfo (anything_id);
4268
4269 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4270 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4271 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4272 fn = SSA_NAME_VAR (fn);
4273
4274 return get_vi_for_tree (fn);
4275 }
4276
4277 /* Create constraints for assigning call argument ARG to the incoming parameter
4278 INDEX of function FI. */
4279
4280 static void
4281 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4282 {
4283 struct constraint_expr lhs;
4284 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4285
4286 auto_vec<ce_s, 2> rhsc;
4287 get_constraint_for_rhs (arg, &rhsc);
4288
4289 unsigned j;
4290 struct constraint_expr *rhsp;
4291 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4292 process_constraint (new_constraint (lhs, *rhsp));
4293 }
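
/* E.g., passing p as the first argument of a known function foo in
   IPA mode produces the constraint foo + (fi_parm_base + 0) = p,
   i.e. the subvariable standing for foo's first parameter includes
   whatever p points to. */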
4294
4295 /* Return true if FNDECL may be part of another lto partition. */
4296
4297 static bool
4298 fndecl_maybe_in_other_partition (tree fndecl)
4299 {
4300 cgraph_node *fn_node = cgraph_node::get (fndecl);
4301 if (fn_node == NULL)
4302 return true;
4303
4304 return fn_node->in_other_partition;
4305 }
4306
4307 /* Create constraints for the builtin call T. Return true if the call
4308 was handled, otherwise false. */
4309
4310 static bool
4311 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4312 {
4313 tree fndecl = gimple_call_fndecl (t);
4314 auto_vec<ce_s, 2> lhsc;
4315 auto_vec<ce_s, 4> rhsc;
4316 varinfo_t fi;
4317
4318 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4319 /* ??? All builtins that are handled here need to be handled
4320 in the alias-oracle query functions explicitly! */
4321 switch (DECL_FUNCTION_CODE (fndecl))
4322 {
4323 /* All the following functions return a pointer to the same object
4324 as their first argument points to. The functions do not add
4325 to the ESCAPED solution. The functions make the memory pointed
4326 to by the first argument point to what the memory pointed to by
4327 the second argument points to. */
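/* E.g., for res = memcpy (d, s, n) this amounts to *d = *s for the
copied pointers plus res = d for the return value. */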
4328 case BUILT_IN_STRCPY:
4329 case BUILT_IN_STRNCPY:
4330 case BUILT_IN_BCOPY:
4331 case BUILT_IN_MEMCPY:
4332 case BUILT_IN_MEMMOVE:
4333 case BUILT_IN_MEMPCPY:
4334 case BUILT_IN_STPCPY:
4335 case BUILT_IN_STPNCPY:
4336 case BUILT_IN_STRCAT:
4337 case BUILT_IN_STRNCAT:
4338 case BUILT_IN_STRCPY_CHK:
4339 case BUILT_IN_STRNCPY_CHK:
4340 case BUILT_IN_MEMCPY_CHK:
4341 case BUILT_IN_MEMMOVE_CHK:
4342 case BUILT_IN_MEMPCPY_CHK:
4343 case BUILT_IN_STPCPY_CHK:
4344 case BUILT_IN_STPNCPY_CHK:
4345 case BUILT_IN_STRCAT_CHK:
4346 case BUILT_IN_STRNCAT_CHK:
4347 case BUILT_IN_TM_MEMCPY:
4348 case BUILT_IN_TM_MEMMOVE:
4349 {
4350 tree res = gimple_call_lhs (t);
4351 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4352 == BUILT_IN_BCOPY ? 1 : 0));
4353 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4354 == BUILT_IN_BCOPY ? 0 : 1));
4355 if (res != NULL_TREE)
4356 {
4357 get_constraint_for (res, &lhsc);
4358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4359 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4360 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4361 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4362 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4363 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4364 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4365 else
4366 get_constraint_for (dest, &rhsc);
4367 process_all_all_constraints (lhsc, rhsc);
4368 lhsc.truncate (0);
4369 rhsc.truncate (0);
4370 }
4371 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4372 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4373 do_deref (&lhsc);
4374 do_deref (&rhsc);
4375 process_all_all_constraints (lhsc, rhsc);
4376 return true;
4377 }
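/* The following functions make the memory pointed to by their first
argument point to the stored integer value, or to nothing when a
zero value is stored and NULL pointer checks are enabled: e.g.,
memset (d, 0, n) roughly yields *d = &NOTHING. */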
4378 case BUILT_IN_MEMSET:
4379 case BUILT_IN_MEMSET_CHK:
4380 case BUILT_IN_TM_MEMSET:
4381 {
4382 tree res = gimple_call_lhs (t);
4383 tree dest = gimple_call_arg (t, 0);
4384 unsigned i;
4385 ce_s *lhsp;
4386 struct constraint_expr ac;
4387 if (res != NULL_TREE)
4388 {
4389 get_constraint_for (res, &lhsc);
4390 get_constraint_for (dest, &rhsc);
4391 process_all_all_constraints (lhsc, rhsc);
4392 lhsc.truncate (0);
4393 }
4394 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4395 do_deref (&lhsc);
4396 if (flag_delete_null_pointer_checks
4397 && integer_zerop (gimple_call_arg (t, 1)))
4398 {
4399 ac.type = ADDRESSOF;
4400 ac.var = nothing_id;
4401 }
4402 else
4403 {
4404 ac.type = SCALAR;
4405 ac.var = integer_id;
4406 }
4407 ac.offset = 0;
4408 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4409 process_constraint (new_constraint (*lhsp, ac));
4410 return true;
4411 }
4412 case BUILT_IN_STACK_SAVE:
4413 case BUILT_IN_STACK_RESTORE:
4414 /* Nothing interesting happens. */
4415 return true;
4416 case BUILT_IN_ALLOCA:
4417 case BUILT_IN_ALLOCA_WITH_ALIGN:
4418 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
4419 {
4420 tree ptr = gimple_call_lhs (t);
4421 if (ptr == NULL_TREE)
4422 return true;
4423 get_constraint_for (ptr, &lhsc);
4424 varinfo_t vi = make_heapvar ("HEAP", true);
4425 /* Alloca storage is never global. To exempt it from escaped
4426 handling make it a non-heap var. */
4427 DECL_EXTERNAL (vi->decl) = 0;
4428 vi->is_global_var = 0;
4429 vi->is_heap_var = 0;
4430 struct constraint_expr tmpc;
4431 tmpc.var = vi->id;
4432 tmpc.offset = 0;
4433 tmpc.type = ADDRESSOF;
4434 rhsc.safe_push (tmpc);
4435 process_all_all_constraints (lhsc, rhsc);
4436 return true;
4437 }
4438 case BUILT_IN_POSIX_MEMALIGN:
4439 {
4440 tree ptrptr = gimple_call_arg (t, 0);
4441 get_constraint_for (ptrptr, &lhsc);
4442 do_deref (&lhsc);
4443 varinfo_t vi = make_heapvar ("HEAP", true);
4444 /* We are marking allocated storage local; we deal with it becoming
4445 global by escaping and the setting of vars_contains_escaped_heap. */
4446 DECL_EXTERNAL (vi->decl) = 0;
4447 vi->is_global_var = 0;
4448 struct constraint_expr tmpc;
4449 tmpc.var = vi->id;
4450 tmpc.offset = 0;
4451 tmpc.type = ADDRESSOF;
4452 rhsc.safe_push (tmpc);
4453 process_all_all_constraints (lhsc, rhsc);
4454 return true;
4455 }
4456 case BUILT_IN_ASSUME_ALIGNED:
4457 {
4458 tree res = gimple_call_lhs (t);
4459 tree dest = gimple_call_arg (t, 0);
4460 if (res != NULL_TREE)
4461 {
4462 get_constraint_for (res, &lhsc);
4463 get_constraint_for (dest, &rhsc);
4464 process_all_all_constraints (lhsc, rhsc);
4465 }
4466 return true;
4467 }
4468 /* All the following functions do not return pointers, do not
4469 modify the points-to sets of memory reachable from their
4470 arguments and do not add to the ESCAPED solution. */
4471 case BUILT_IN_SINCOS:
4472 case BUILT_IN_SINCOSF:
4473 case BUILT_IN_SINCOSL:
4474 case BUILT_IN_FREXP:
4475 case BUILT_IN_FREXPF:
4476 case BUILT_IN_FREXPL:
4477 case BUILT_IN_GAMMA_R:
4478 case BUILT_IN_GAMMAF_R:
4479 case BUILT_IN_GAMMAL_R:
4480 case BUILT_IN_LGAMMA_R:
4481 case BUILT_IN_LGAMMAF_R:
4482 case BUILT_IN_LGAMMAL_R:
4483 case BUILT_IN_MODF:
4484 case BUILT_IN_MODFF:
4485 case BUILT_IN_MODFL:
4486 case BUILT_IN_REMQUO:
4487 case BUILT_IN_REMQUOF:
4488 case BUILT_IN_REMQUOL:
4489 case BUILT_IN_FREE:
4490 return true;
4491 case BUILT_IN_STRDUP:
4492 case BUILT_IN_STRNDUP:
4493 case BUILT_IN_REALLOC:
4494 if (gimple_call_lhs (t))
4495 {
4496 handle_lhs_call (t, gimple_call_lhs (t),
4497 gimple_call_return_flags (t) | ERF_NOALIAS,
4498 vNULL, fndecl);
4499 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4500 NULL_TREE, &lhsc);
4501 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4502 NULL_TREE, &rhsc);
4503 do_deref (&lhsc);
4504 do_deref (&rhsc);
4505 process_all_all_constraints (lhsc, rhsc);
4506 lhsc.truncate (0);
4507 rhsc.truncate (0);
4508 /* For realloc the resulting pointer can be equal to the
4509 argument as well. But doing only this wouldn't be
4510 correct because with ptr == 0 realloc behaves like malloc. */
4511 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4512 {
4513 get_constraint_for (gimple_call_lhs (t), &lhsc);
4514 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4515 process_all_all_constraints (lhsc, rhsc);
4516 }
4517 return true;
4518 }
4519 break;
4520 /* String / character search functions return a pointer into the
4521 source string or NULL. */
4522 case BUILT_IN_INDEX:
4523 case BUILT_IN_STRCHR:
4524 case BUILT_IN_STRRCHR:
4525 case BUILT_IN_MEMCHR:
4526 case BUILT_IN_STRSTR:
4527 case BUILT_IN_STRPBRK:
4528 if (gimple_call_lhs (t))
4529 {
4530 tree src = gimple_call_arg (t, 0);
4531 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4532 constraint_expr nul;
4533 nul.var = nothing_id;
4534 nul.offset = 0;
4535 nul.type = ADDRESSOF;
4536 rhsc.safe_push (nul);
4537 get_constraint_for (gimple_call_lhs (t), &lhsc);
4538 process_all_all_constraints (lhsc, rhsc);
4539 }
4540 return true;
4541 /* Pure functions that return something not based on any object and
4542 that use the memory pointed to by their arguments (but not
4543 transitively). */
4544 case BUILT_IN_STRCMP:
4545 case BUILT_IN_STRCMP_EQ:
4546 case BUILT_IN_STRNCMP:
4547 case BUILT_IN_STRNCMP_EQ:
4548 case BUILT_IN_STRCASECMP:
4549 case BUILT_IN_STRNCASECMP:
4550 case BUILT_IN_MEMCMP:
4551 case BUILT_IN_BCMP:
4552 case BUILT_IN_STRSPN:
4553 case BUILT_IN_STRCSPN:
4554 {
4555 varinfo_t uses = get_call_use_vi (t);
4556 make_any_offset_constraints (uses);
4557 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4558 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4559 /* No constraints are necessary for the return value. */
4560 return true;
4561 }
4562 case BUILT_IN_STRLEN:
4563 {
4564 varinfo_t uses = get_call_use_vi (t);
4565 make_any_offset_constraints (uses);
4566 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4567 /* No constraints are necessary for the return value. */
4568 return true;
4569 }
4570 case BUILT_IN_OBJECT_SIZE:
4571 case BUILT_IN_CONSTANT_P:
4572 {
4573 /* No constraints are necessary for the return value or the
4574 arguments. */
4575 return true;
4576 }
4577 /* Trampolines are special - they set up passing the static
4578 frame. */
4579 case BUILT_IN_INIT_TRAMPOLINE:
4580 {
4581 tree tramp = gimple_call_arg (t, 0);
4582 tree nfunc = gimple_call_arg (t, 1);
4583 tree frame = gimple_call_arg (t, 2);
4584 unsigned i;
4585 struct constraint_expr lhs, *rhsp;
4586 if (in_ipa_mode)
4587 {
4588 varinfo_t nfi = NULL;
4589 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4590 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4591 if (nfi)
4592 {
4593 lhs = get_function_part_constraint (nfi, fi_static_chain);
4594 get_constraint_for (frame, &rhsc);
4595 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4596 process_constraint (new_constraint (lhs, *rhsp));
4597 rhsc.truncate (0);
4598
4599 /* Make the frame point to the function for
4600 the trampoline adjustment call. */
4601 get_constraint_for (tramp, &lhsc);
4602 do_deref (&lhsc);
4603 get_constraint_for (nfunc, &rhsc);
4604 process_all_all_constraints (lhsc, rhsc);
4605
4606 return true;
4607 }
4608 }
4609 /* Else fallthru to generic handling which will let
4610 the frame escape. */
4611 break;
4612 }
4613 case BUILT_IN_ADJUST_TRAMPOLINE:
4614 {
4615 tree tramp = gimple_call_arg (t, 0);
4616 tree res = gimple_call_lhs (t);
4617 if (in_ipa_mode && res)
4618 {
4619 get_constraint_for (res, &lhsc);
4620 get_constraint_for (tramp, &rhsc);
4621 do_deref (&rhsc);
4622 process_all_all_constraints (lhsc, rhsc);
4623 }
4624 return true;
4625 }
4626 CASE_BUILT_IN_TM_STORE (1):
4627 CASE_BUILT_IN_TM_STORE (2):
4628 CASE_BUILT_IN_TM_STORE (4):
4629 CASE_BUILT_IN_TM_STORE (8):
4630 CASE_BUILT_IN_TM_STORE (FLOAT):
4631 CASE_BUILT_IN_TM_STORE (DOUBLE):
4632 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4633 CASE_BUILT_IN_TM_STORE (M64):
4634 CASE_BUILT_IN_TM_STORE (M128):
4635 CASE_BUILT_IN_TM_STORE (M256):
4636 {
4637 tree addr = gimple_call_arg (t, 0);
4638 tree src = gimple_call_arg (t, 1);
4639
4640 get_constraint_for (addr, &lhsc);
4641 do_deref (&lhsc);
4642 get_constraint_for (src, &rhsc);
4643 process_all_all_constraints (lhsc, rhsc);
4644 return true;
4645 }
4646 CASE_BUILT_IN_TM_LOAD (1):
4647 CASE_BUILT_IN_TM_LOAD (2):
4648 CASE_BUILT_IN_TM_LOAD (4):
4649 CASE_BUILT_IN_TM_LOAD (8):
4650 CASE_BUILT_IN_TM_LOAD (FLOAT):
4651 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4652 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4653 CASE_BUILT_IN_TM_LOAD (M64):
4654 CASE_BUILT_IN_TM_LOAD (M128):
4655 CASE_BUILT_IN_TM_LOAD (M256):
4656 {
4657 tree dest = gimple_call_lhs (t);
4658 tree addr = gimple_call_arg (t, 0);
4659
4660 get_constraint_for (dest, &lhsc);
4661 get_constraint_for (addr, &rhsc);
4662 do_deref (&rhsc);
4663 process_all_all_constraints (lhsc, rhsc);
4664 return true;
4665 }
4666 /* Variadic arguments need to be handled in IPA
4667 mode as well. */
4668 case BUILT_IN_VA_START:
4669 {
4670 tree valist = gimple_call_arg (t, 0);
4671 struct constraint_expr rhs, *lhsp;
4672 unsigned i;
4673 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4674 do_deref (&lhsc);
4675 /* The va_list gets access to pointers in variadic
4676 arguments, which we know in the case of IPA analysis;
4677 otherwise they are just all nonlocal variables. */
4678 if (in_ipa_mode)
4679 {
4680 fi = lookup_vi_for_tree (fn->decl);
4681 rhs = get_function_part_constraint (fi, ~0);
4682 rhs.type = ADDRESSOF;
4683 }
4684 else
4685 {
4686 rhs.var = nonlocal_id;
4687 rhs.type = ADDRESSOF;
4688 rhs.offset = 0;
4689 }
4690 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4691 process_constraint (new_constraint (*lhsp, rhs));
4692 /* va_list is clobbered. */
4693 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4694 return true;
4695 }
4696 /* va_end doesn't have any effect that matters. */
4697 case BUILT_IN_VA_END:
4698 return true;
4699 /* Alternate return. Simply give up for now. */
4700 case BUILT_IN_RETURN:
4701 {
4702 fi = NULL;
4703 if (!in_ipa_mode
4704 || !(fi = get_vi_for_tree (fn->decl)))
4705 make_constraint_from (get_varinfo (escaped_id), anything_id);
4706 else if (in_ipa_mode
4707 && fi != NULL)
4708 {
4709 struct constraint_expr lhs, rhs;
4710 lhs = get_function_part_constraint (fi, fi_result);
4711 rhs.var = anything_id;
4712 rhs.offset = 0;
4713 rhs.type = SCALAR;
4714 process_constraint (new_constraint (lhs, rhs));
4715 }
4716 return true;
4717 }
4718 case BUILT_IN_GOMP_PARALLEL:
4719 case BUILT_IN_GOACC_PARALLEL:
4720 {
4721 if (in_ipa_mode)
4722 {
4723 unsigned int fnpos, argpos;
4724 switch (DECL_FUNCTION_CODE (fndecl))
4725 {
4726 case BUILT_IN_GOMP_PARALLEL:
4727 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4728 fnpos = 0;
4729 argpos = 1;
4730 break;
4731 case BUILT_IN_GOACC_PARALLEL:
4732 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4733 sizes, kinds, ...). */
4734 fnpos = 1;
4735 argpos = 3;
4736 break;
4737 default:
4738 gcc_unreachable ();
4739 }
4740
4741 tree fnarg = gimple_call_arg (t, fnpos);
4742 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4743 tree fndecl = TREE_OPERAND (fnarg, 0);
4744 if (fndecl_maybe_in_other_partition (fndecl))
4745 /* Fallthru to general call handling. */
4746 break;
4747
4748 tree arg = gimple_call_arg (t, argpos);
4749
4750 varinfo_t fi = get_vi_for_tree (fndecl);
4751 find_func_aliases_for_call_arg (fi, 0, arg);
4752 return true;
4753 }
4754 /* Else fallthru to generic call handling. */
4755 break;
4756 }
4757 /* printf-style functions may have hooks to set pointers to
4758 point somewhere into the generated string. Leave them
4759 for a later exercise... */
4760 default:
4761 /* Fallthru to general call handling. */;
4762 }
4763
4764 return false;
4765 }
4766
4767 /* Create constraints for the call T. */
4768
4769 static void
4770 find_func_aliases_for_call (struct function *fn, gcall *t)
4771 {
4772 tree fndecl = gimple_call_fndecl (t);
4773 varinfo_t fi;
4774
4775 if (fndecl != NULL_TREE
4776 && fndecl_built_in_p (fndecl)
4777 && find_func_aliases_for_builtin_call (fn, t))
4778 return;
4779
4780 fi = get_fi_for_callee (t);
4781 if (!in_ipa_mode
4782 || (fi->decl && fndecl && !fi->is_fn_info))
4783 {
4784 auto_vec<ce_s, 16> rhsc;
4785 int flags = gimple_call_flags (t);
4786
4787 /* Const functions can return their arguments and addresses
4788 of global memory but not of escaped memory. */
4789 if (flags & (ECF_CONST|ECF_NOVOPS))
4790 {
4791 if (gimple_call_lhs (t))
4792 handle_const_call (t, &rhsc);
4793 }
4794 /* Pure functions can return addresses in and of memory
4795 reachable from their arguments, but they are not an escape
4796 point for reachable memory of their arguments. */
4797 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4798 handle_pure_call (t, &rhsc);
4799 else
4800 handle_rhs_call (t, &rhsc);
4801 if (gimple_call_lhs (t))
4802 handle_lhs_call (t, gimple_call_lhs (t),
4803 gimple_call_return_flags (t), rhsc, fndecl);
4804 }
4805 else
4806 {
4807 auto_vec<ce_s, 2> rhsc;
4808 tree lhsop;
4809 unsigned j;
4810
4811 /* Assign all the passed arguments to the appropriate incoming
4812 parameters of the function. */
4813 for (j = 0; j < gimple_call_num_args (t); j++)
4814 {
4815 tree arg = gimple_call_arg (t, j);
4816 find_func_aliases_for_call_arg (fi, j, arg);
4817 }
4818
4819 /* If we are returning a value, assign it to the result. */
4820 lhsop = gimple_call_lhs (t);
4821 if (lhsop)
4822 {
4823 auto_vec<ce_s, 2> lhsc;
4824 struct constraint_expr rhs;
4825 struct constraint_expr *lhsp;
4826 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4827
4828 get_constraint_for (lhsop, &lhsc);
4829 rhs = get_function_part_constraint (fi, fi_result);
4830 if (aggr_p)
4831 {
4832 auto_vec<ce_s, 2> tem;
4833 tem.quick_push (rhs);
4834 do_deref (&tem);
4835 gcc_checking_assert (tem.length () == 1);
4836 rhs = tem[0];
4837 }
4838 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4839 process_constraint (new_constraint (*lhsp, rhs));
4840
4841 /* If we pass the result decl by reference, honor that. */
4842 if (aggr_p)
4843 {
4844 struct constraint_expr lhs;
4845 struct constraint_expr *rhsp;
4846
4847 get_constraint_for_address_of (lhsop, &rhsc);
4848 lhs = get_function_part_constraint (fi, fi_result);
4849 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4850 process_constraint (new_constraint (lhs, *rhsp));
4851 rhsc.truncate (0);
4852 }
4853 }
4854
4855 /* If we use a static chain, pass it along. */
4856 if (gimple_call_chain (t))
4857 {
4858 struct constraint_expr lhs;
4859 struct constraint_expr *rhsp;
4860
4861 get_constraint_for (gimple_call_chain (t), &rhsc);
4862 lhs = get_function_part_constraint (fi, fi_static_chain);
4863 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4864 process_constraint (new_constraint (lhs, *rhsp));
4865 }
4866 }
4867 }
4868
4869 /* Walk statement T setting up aliasing constraints according to the
4870 references found in T. This function is the main part of the
4871 constraint builder. */
4873
4874 static void
4875 find_func_aliases (struct function *fn, gimple *origt)
4876 {
4877 gimple *t = origt;
4878 auto_vec<ce_s, 16> lhsc;
4879 auto_vec<ce_s, 16> rhsc;
4880 varinfo_t fi;
4881
4882 /* Now build constraints expressions. */
4883 if (gimple_code (t) == GIMPLE_PHI)
4884 {
4885 /* For a phi node, assign all the arguments to
4886 the result. */
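/* E.g., x_2 = PHI <x_0, x_1> simply yields the copy
constraints x_2 = x_0 and x_2 = x_1. */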
4887 get_constraint_for (gimple_phi_result (t), &lhsc);
4888 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
4889 {
4890 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4891 process_all_all_constraints (lhsc, rhsc);
4892 rhsc.truncate (0);
4893 }
4894 }
4895 /* In IPA mode, we need to generate constraints to pass call
4896 arguments through their calls. There are two cases,
4897 either a GIMPLE_CALL returning a value, or just a plain
4898 GIMPLE_CALL when it does not.
4899
4900 In non-ipa mode, we need to generate constraints for each
4901 pointer passed by address. */
4902 else if (is_gimple_call (t))
4903 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4904
4905 /* Otherwise, just a regular assignment statement. Only care about
4906 operations with pointer result, others are dealt with as escape
4907 points if they have pointer operands. */
4908 else if (is_gimple_assign (t))
4909 {
4911 tree lhsop = gimple_assign_lhs (t);
4912 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4913
4914 if (rhsop && TREE_CLOBBER_P (rhsop))
4915 /* Ignore clobbers, they don't actually store anything into
4916 the LHS. */
4917 ;
4918 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4919 do_structure_copy (lhsop, rhsop);
4920 else
4921 {
4922 enum tree_code code = gimple_assign_rhs_code (t);
4923
4924 get_constraint_for (lhsop, &lhsc);
4925
4926 if (code == POINTER_PLUS_EXPR)
4927 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4928 gimple_assign_rhs2 (t), &rhsc);
4929 else if (code == POINTER_DIFF_EXPR)
4930 /* The result is not a pointer (part). */
4931 ;
4932 else if (code == BIT_AND_EXPR
4933 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4934 {
4935 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4936 the pointer. Handle it by offsetting it by UNKNOWN. */
4937 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4938 NULL_TREE, &rhsc);
4939 }
4940 else if (code == TRUNC_DIV_EXPR
4941 || code == CEIL_DIV_EXPR
4942 || code == FLOOR_DIV_EXPR
4943 || code == ROUND_DIV_EXPR
4944 || code == EXACT_DIV_EXPR
4945 || code == TRUNC_MOD_EXPR
4946 || code == CEIL_MOD_EXPR
4947 || code == FLOOR_MOD_EXPR
4948 || code == ROUND_MOD_EXPR)
4949 /* Division and modulo transfer the pointer from their first operand. */
4950 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4951 else if ((CONVERT_EXPR_CODE_P (code)
4952 && !(POINTER_TYPE_P (gimple_expr_type (t))
4953 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4954 || gimple_assign_single_p (t))
4955 get_constraint_for_rhs (rhsop, &rhsc);
4956 else if (code == COND_EXPR)
4957 {
4958 /* The result is a merge of both COND_EXPR arms. */
4959 auto_vec<ce_s, 2> tmp;
4960 struct constraint_expr *rhsp;
4961 unsigned i;
4962 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4963 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4964 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4965 rhsc.safe_push (*rhsp);
4966 }
4967 else if (truth_value_p (code))
4968 /* Truth value results are not pointer (parts), or at least
4969 would be a very unreasonable obfuscation of a part. */
4970 ;
4971 else
4972 {
4973 /* All other operations are merges. */
4974 auto_vec<ce_s, 4> tmp;
4975 struct constraint_expr *rhsp;
4976 unsigned i, j;
4977 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4978 for (i = 2; i < gimple_num_ops (t); ++i)
4979 {
4980 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4981 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4982 rhsc.safe_push (*rhsp);
4983 tmp.truncate (0);
4984 }
4985 }
4986 process_all_all_constraints (lhsc, rhsc);
4987 }
4988 /* If there is a store to a global variable the rhs escapes. */
4989 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4990 && DECL_P (lhsop))
4991 {
4992 varinfo_t vi = get_vi_for_tree (lhsop);
4993 if ((! in_ipa_mode && vi->is_global_var)
4994 || vi->is_ipa_escape_point)
4995 make_escape_constraint (rhsop);
4996 }
4997 }
4998 /* Handle escapes through return. */
4999 else if (gimple_code (t) == GIMPLE_RETURN
5000 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
5001 {
5002 greturn *return_stmt = as_a <greturn *> (t);
5003 fi = NULL;
5004 if (!in_ipa_mode
5005 && SSA_VAR_P (gimple_return_retval (return_stmt)))
5006 {
5007 /* We handle simple returns by post-processing the solutions. */
5008 ;
5009 }
5010 if (!(fi = get_vi_for_tree (fn->decl)))
5011 make_escape_constraint (gimple_return_retval (return_stmt));
5012 else if (in_ipa_mode)
5013 {
5014 struct constraint_expr lhs ;
5015 struct constraint_expr *rhsp;
5016 unsigned i;
5017
5018 lhs = get_function_part_constraint (fi, fi_result);
5019 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
5020 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5021 process_constraint (new_constraint (lhs, *rhsp));
5022 }
5023 }
5024 /* Handle asms conservatively by adding escape constraints to everything. */
5025 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
5026 {
5027 unsigned i, noutputs;
5028 const char **oconstraints;
5029 const char *constraint;
5030 bool allows_mem, allows_reg, is_inout;
5031
5032 noutputs = gimple_asm_noutputs (asm_stmt);
5033 oconstraints = XALLOCAVEC (const char *, noutputs);
5034
5035 for (i = 0; i < noutputs; ++i)
5036 {
5037 tree link = gimple_asm_output_op (asm_stmt, i);
5038 tree op = TREE_VALUE (link);
5039
5040 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5041 oconstraints[i] = constraint;
5042 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5043 &allows_reg, &is_inout);
5044
5045 /* A memory constraint makes the address of the operand escape. */
5046 if (!allows_reg && allows_mem)
5047 make_escape_constraint (build_fold_addr_expr (op));
5048
5049 /* The asm may read global memory, so outputs may point to
5050 any global memory. */
5051 if (op)
5052 {
5053 auto_vec<ce_s, 2> lhsc;
5054 struct constraint_expr rhsc, *lhsp;
5055 unsigned j;
5056 get_constraint_for (op, &lhsc);
5057 rhsc.var = nonlocal_id;
5058 rhsc.offset = 0;
5059 rhsc.type = SCALAR;
5060 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5061 process_constraint (new_constraint (*lhsp, rhsc));
5062 }
5063 }
5064 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5065 {
5066 tree link = gimple_asm_input_op (asm_stmt, i);
5067 tree op = TREE_VALUE (link);
5068
5069 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5070
5071 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5072 &allows_mem, &allows_reg);
5073
5074 /* A memory constraint makes the address of the operand escape. */
5075 if (!allows_reg && allows_mem)
5076 make_escape_constraint (build_fold_addr_expr (op));
5077 /* Strictly we'd only need the constraint to ESCAPED if
5078 the asm clobbers memory, otherwise using something
5079 along the lines of per-call clobbers/uses would be enough. */
5080 else if (op)
5081 make_escape_constraint (op);
5082 }
5083 }
5084 }
5085
5086
5087 /* Create a constraint adding to the clobber set of FI the memory
5088 pointed to by PTR. */
5089
5090 static void
5091 process_ipa_clobber (varinfo_t fi, tree ptr)
5092 {
5093 vec<ce_s> ptrc = vNULL;
5094 struct constraint_expr *c, lhs;
5095 unsigned i;
5096 get_constraint_for_rhs (ptr, &ptrc);
5097 lhs = get_function_part_constraint (fi, fi_clobbers);
5098 FOR_EACH_VEC_ELT (ptrc, i, c)
5099 process_constraint (new_constraint (lhs, *c));
5100 ptrc.release ();
5101 }
5102
5103 /* Walk statement T setting up clobber and use constraints according to the
5104 references found in T. This function is the main part of the
5105 IPA constraint builder. */
5106
5107 static void
5108 find_func_clobbers (struct function *fn, gimple *origt)
5109 {
5110 gimple *t = origt;
5111 auto_vec<ce_s, 16> lhsc;
5112 auto_vec<ce_s, 16> rhsc;
5113 varinfo_t fi;
5114
5115 /* Add constraints for clobbered/used in IPA mode.
5116 We are not interested in what automatic variables are clobbered
5117 or used, as we only use the information in the caller, to which
5118 they do not escape.
5119 gcc_assert (in_ipa_mode);
5120
5121 /* If the stmt refers to memory in any way it had better have a VUSE. */
5122 if (gimple_vuse (t) == NULL_TREE)
5123 return;
5124
5125 /* We'd better have function information for the current function. */
5126 fi = lookup_vi_for_tree (fn->decl);
5127 gcc_assert (fi != NULL);
5128
5129 /* Account for stores in assignments and calls. */
5130 if (gimple_vdef (t) != NULL_TREE
5131 && gimple_has_lhs (t))
5132 {
5133 tree lhs = gimple_get_lhs (t);
5134 tree tem = lhs;
5135 while (handled_component_p (tem))
5136 tem = TREE_OPERAND (tem, 0);
5137 if ((DECL_P (tem)
5138 && !auto_var_in_fn_p (tem, fn->decl))
5139 || INDIRECT_REF_P (tem)
5140 || (TREE_CODE (tem) == MEM_REF
5141 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5142 && auto_var_in_fn_p
5143 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5144 {
5145 struct constraint_expr lhsc, *rhsp;
5146 unsigned i;
5147 lhsc = get_function_part_constraint (fi, fi_clobbers);
5148 get_constraint_for_address_of (lhs, &rhsc);
5149 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5150 process_constraint (new_constraint (lhsc, *rhsp));
5151 rhsc.truncate (0);
5152 }
5153 }
5154
5155 /* Account for uses in assignments and returns. */
5156 if (gimple_assign_single_p (t)
5157 || (gimple_code (t) == GIMPLE_RETURN
5158 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5159 {
5160 tree rhs = (gimple_assign_single_p (t)
5161 ? gimple_assign_rhs1 (t)
5162 : gimple_return_retval (as_a <greturn *> (t)));
5163 tree tem = rhs;
5164 while (handled_component_p (tem))
5165 tem = TREE_OPERAND (tem, 0);
5166 if ((DECL_P (tem)
5167 && !auto_var_in_fn_p (tem, fn->decl))
5168 || INDIRECT_REF_P (tem)
5169 || (TREE_CODE (tem) == MEM_REF
5170 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5171 && auto_var_in_fn_p
5172 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5173 {
5174 struct constraint_expr lhs, *rhsp;
5175 unsigned i;
5176 lhs = get_function_part_constraint (fi, fi_uses);
5177 get_constraint_for_address_of (rhs, &rhsc);
5178 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5179 process_constraint (new_constraint (lhs, *rhsp));
5180 rhsc.truncate (0);
5181 }
5182 }
5183
5184 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5185 {
5186 varinfo_t cfi = NULL;
5187 tree decl = gimple_call_fndecl (t);
5188 struct constraint_expr lhs, rhs;
5189 unsigned i, j;
5190
5191 /* For builtins we do not have separate function info. For those
5192 we do not generate escapes for, we have to generate clobbers/uses. */
5193 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5194 switch (DECL_FUNCTION_CODE (decl))
5195 {
5196 /* The following functions use and clobber memory pointed to
5197 by their arguments. */
5198 case BUILT_IN_STRCPY:
5199 case BUILT_IN_STRNCPY:
5200 case BUILT_IN_BCOPY:
5201 case BUILT_IN_MEMCPY:
5202 case BUILT_IN_MEMMOVE:
5203 case BUILT_IN_MEMPCPY:
5204 case BUILT_IN_STPCPY:
5205 case BUILT_IN_STPNCPY:
5206 case BUILT_IN_STRCAT:
5207 case BUILT_IN_STRNCAT:
5208 case BUILT_IN_STRCPY_CHK:
5209 case BUILT_IN_STRNCPY_CHK:
5210 case BUILT_IN_MEMCPY_CHK:
5211 case BUILT_IN_MEMMOVE_CHK:
5212 case BUILT_IN_MEMPCPY_CHK:
5213 case BUILT_IN_STPCPY_CHK:
5214 case BUILT_IN_STPNCPY_CHK:
5215 case BUILT_IN_STRCAT_CHK:
5216 case BUILT_IN_STRNCAT_CHK:
5217 {
5218 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5219 == BUILT_IN_BCOPY ? 1 : 0));
5220 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5221 == BUILT_IN_BCOPY ? 0 : 1));
5222 unsigned i;
5223 struct constraint_expr *rhsp, *lhsp;
5224 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5225 lhs = get_function_part_constraint (fi, fi_clobbers);
5226 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5227 process_constraint (new_constraint (lhs, *lhsp));
5228 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5229 lhs = get_function_part_constraint (fi, fi_uses);
5230 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5231 process_constraint (new_constraint (lhs, *rhsp));
5232 return;
5233 }
5234 /* The following functions clobber memory pointed to by
5235 their first argument. */
5236 case BUILT_IN_MEMSET:
5237 case BUILT_IN_MEMSET_CHK:
5238 case BUILT_IN_POSIX_MEMALIGN:
5239 {
5240 tree dest = gimple_call_arg (t, 0);
5241 unsigned i;
5242 ce_s *lhsp;
5243 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5244 lhs = get_function_part_constraint (fi, fi_clobbers);
5245 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5246 process_constraint (new_constraint (lhs, *lhsp));
5247 return;
5248 }
5249 /* The following functions clobber their second and third
5250 arguments. */
5251 case BUILT_IN_SINCOS:
5252 case BUILT_IN_SINCOSF:
5253 case BUILT_IN_SINCOSL:
5254 {
5255 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5256 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5257 return;
5258 }
5259 /* The following functions clobber their second argument. */
5260 case BUILT_IN_FREXP:
5261 case BUILT_IN_FREXPF:
5262 case BUILT_IN_FREXPL:
5263 case BUILT_IN_LGAMMA_R:
5264 case BUILT_IN_LGAMMAF_R:
5265 case BUILT_IN_LGAMMAL_R:
5266 case BUILT_IN_GAMMA_R:
5267 case BUILT_IN_GAMMAF_R:
5268 case BUILT_IN_GAMMAL_R:
5269 case BUILT_IN_MODF:
5270 case BUILT_IN_MODFF:
5271 case BUILT_IN_MODFL:
5272 {
5273 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5274 return;
5275 }
5276 /* The following functions clobber their third argument. */
5277 case BUILT_IN_REMQUO:
5278 case BUILT_IN_REMQUOF:
5279 case BUILT_IN_REMQUOL:
5280 {
5281 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5282 return;
5283 }
5284 /* The following functions neither read nor clobber memory. */
5285 case BUILT_IN_ASSUME_ALIGNED:
5286 case BUILT_IN_FREE:
5287 return;
5288 /* Trampolines are of no interest to us. */
5289 case BUILT_IN_INIT_TRAMPOLINE:
5290 case BUILT_IN_ADJUST_TRAMPOLINE:
5291 return;
5292 case BUILT_IN_VA_START:
5293 case BUILT_IN_VA_END:
5294 return;
5295 case BUILT_IN_GOMP_PARALLEL:
5296 case BUILT_IN_GOACC_PARALLEL:
5297 {
5298 unsigned int fnpos, argpos;
5299 unsigned int implicit_use_args[2];
5300 unsigned int num_implicit_use_args = 0;
5301 switch (DECL_FUNCTION_CODE (decl))
5302 {
5303 case BUILT_IN_GOMP_PARALLEL:
5304 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5305 fnpos = 0;
5306 argpos = 1;
5307 break;
5308 case BUILT_IN_GOACC_PARALLEL:
5309 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5310 sizes, kinds, ...). */
5311 fnpos = 1;
5312 argpos = 3;
5313 implicit_use_args[num_implicit_use_args++] = 4;
5314 implicit_use_args[num_implicit_use_args++] = 5;
5315 break;
5316 default:
5317 gcc_unreachable ();
5318 }
5319
5320 tree fnarg = gimple_call_arg (t, fnpos);
5321 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5322 tree fndecl = TREE_OPERAND (fnarg, 0);
5323 if (fndecl_maybe_in_other_partition (fndecl))
5324 /* Fallthru to general call handling. */
5325 break;
5326
5327 varinfo_t cfi = get_vi_for_tree (fndecl);
5328
5329 tree arg = gimple_call_arg (t, argpos);
5330
5331 /* Parameter passed by value is used. */
5332 lhs = get_function_part_constraint (fi, fi_uses);
5333 struct constraint_expr *rhsp;
5334 get_constraint_for (arg, &rhsc);
5335 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5336 process_constraint (new_constraint (lhs, *rhsp));
5337 rhsc.truncate (0);
5338
5339 /* Handle parameters used by the call, but not used in cfi, as
5340 implicitly used by cfi. */
5341 lhs = get_function_part_constraint (cfi, fi_uses);
5342 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5343 {
5344 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5345 get_constraint_for (arg, &rhsc);
5346 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5347 process_constraint (new_constraint (lhs, *rhsp));
5348 rhsc.truncate (0);
5349 }
5350
5351 /* The caller clobbers what the callee does. */
5352 lhs = get_function_part_constraint (fi, fi_clobbers);
5353 rhs = get_function_part_constraint (cfi, fi_clobbers);
5354 process_constraint (new_constraint (lhs, rhs));
5355
5356 /* The caller uses what the callee does. */
5357 lhs = get_function_part_constraint (fi, fi_uses);
5358 rhs = get_function_part_constraint (cfi, fi_uses);
5359 process_constraint (new_constraint (lhs, rhs));
5360
5361 return;
5362 }
5363 /* printf-style functions may have hooks to set pointers to
5364 point somewhere into the generated string. Leave them
5365 for a later exercise... */
5366 default:
5367 /* Fallthru to general call handling. */;
5368 }
5369
5370 /* Parameters passed by value are used. */
5371 lhs = get_function_part_constraint (fi, fi_uses);
5372 for (i = 0; i < gimple_call_num_args (t); i++)
5373 {
5374 struct constraint_expr *rhsp;
5375 tree arg = gimple_call_arg (t, i);
5376
5377 if (TREE_CODE (arg) == SSA_NAME
5378 || is_gimple_min_invariant (arg))
5379 continue;
5380
5381 get_constraint_for_address_of (arg, &rhsc);
5382 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5383 process_constraint (new_constraint (lhs, *rhsp));
5384 rhsc.truncate (0);
5385 }
5386
5387 /* Build constraints for propagating clobbers/uses along the
5388 callgraph edges. */
5389 cfi = get_fi_for_callee (call_stmt);
5390 if (cfi->id == anything_id)
5391 {
5392 if (gimple_vdef (t))
5393 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5394 anything_id);
5395 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5396 anything_id);
5397 return;
5398 }
5399
5400 /* For callees without function info (that's external functions),
5401 ESCAPED is clobbered and used. */
5402 if (cfi->decl
5403 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5404 && !cfi->is_fn_info)
5405 {
5406 varinfo_t vi;
5407
5408 if (gimple_vdef (t))
5409 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5410 escaped_id);
5411 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5412
5413 /* Also honor the call statement use/clobber info. */
5414 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5415 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5416 vi->id);
5417 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5418 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5419 vi->id);
5420 return;
5421 }
5422
5423 /* Otherwise the caller clobbers and uses what the callee does.
5424 ??? This should use a new complex constraint that filters
5425 local variables of the callee. */
5426 if (gimple_vdef (t))
5427 {
5428 lhs = get_function_part_constraint (fi, fi_clobbers);
5429 rhs = get_function_part_constraint (cfi, fi_clobbers);
5430 process_constraint (new_constraint (lhs, rhs));
5431 }
5432 lhs = get_function_part_constraint (fi, fi_uses);
5433 rhs = get_function_part_constraint (cfi, fi_uses);
5434 process_constraint (new_constraint (lhs, rhs));
5435 }
5436 else if (gimple_code (t) == GIMPLE_ASM)
5437 {
5438 /* ??? Ick. We can do better. */
5439 if (gimple_vdef (t))
5440 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5441 anything_id);
5442 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5443 anything_id);
5444 }
5445 }
5446
5447
5448 /* Find the first varinfo in the same variable as START that overlaps with
5449 OFFSET. Return NULL if we can't find one. */
5450
5451 static varinfo_t
5452 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5453 {
5454 /* If the offset is outside of the variable, bail out. */
5455 if (offset >= start->fullsize)
5456 return NULL;
5457
5458 /* If we cannot reach offset from start, look up the first field
5459 and start from there. */
5460 if (start->offset > offset)
5461 start = get_varinfo (start->head);
5462
5463 while (start)
5464 {
5465 /* We may not find a variable in the field list with the actual
5466 offset when we have glommed a structure to a variable.
5467 In that case, however, offset should still be within the size
5468 of the variable. */
5469 if (offset >= start->offset
5470 && (offset - start->offset) < start->size)
5471 return start;
5472
5473 start = vi_next (start);
5474 }
5475
5476 return NULL;
5477 }
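
/* For example, with subfields at offsets 0 and 64, each 64 bits in
   size (a purely illustrative layout), a query for offset 80 returns
   the subfield starting at offset 64, while a query beyond the
   variable's full size returns NULL. */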
5478
5479 /* Find the first varinfo in the same variable as START that overlaps with
5480 OFFSET. If there is no such varinfo the varinfo directly preceding
5481 OFFSET is returned. */
5482
5483 static varinfo_t
5484 first_or_preceding_vi_for_offset (varinfo_t start,
5485 unsigned HOST_WIDE_INT offset)
5486 {
5487 /* If we cannot reach offset from start, look up the first field
5488 and start from there. */
5489 if (start->offset > offset)
5490 start = get_varinfo (start->head);
5491
5492 /* We may not find a variable in the field list with the actual
5493 offset when we have glommed a structure to a variable.
5494 In that case, however, offset should still be within the size
5495 of the variable.
5496 If we get beyond the offset we are looking for, return the field
5497 directly preceding it, which may be the last field. */
5498 while (start->next
5499 && offset >= start->offset
5500 && !((offset - start->offset) < start->size))
5501 start = vi_next (start);
5502
5503 return start;
5504 }
5505
5506
5507 /* This structure is used during pushing fields onto the fieldstack
5508 to track the offset of the field, since bitpos_of_field gives it
5509 relative to its immediate containing type, and we want it relative
5510 to the ultimate containing object. */
5511
5512 struct fieldoff
5513 {
5514 /* Offset from the base of the base containing object to this field. */
5515 HOST_WIDE_INT offset;
5516
5517 /* Size, in bits, of the field. */
5518 unsigned HOST_WIDE_INT size;
5519
5520 unsigned has_unknown_size : 1;
5521
5522 unsigned must_have_pointers : 1;
5523
5524 unsigned may_have_pointers : 1;
5525
5526 unsigned only_restrict_pointers : 1;
5527
5528 tree restrict_pointed_type;
5529 };
5530 typedef struct fieldoff fieldoff_s;
5531
5532
5533 /* qsort comparison function for two fieldoff's PA and PB */
5534
5535 static int
5536 fieldoff_compare (const void *pa, const void *pb)
5537 {
5538 const fieldoff_s *foa = (const fieldoff_s *)pa;
5539 const fieldoff_s *fob = (const fieldoff_s *)pb;
5540 unsigned HOST_WIDE_INT foasize, fobsize;
5541
5542 if (foa->offset < fob->offset)
5543 return -1;
5544 else if (foa->offset > fob->offset)
5545 return 1;
5546
5547 foasize = foa->size;
5548 fobsize = fob->size;
5549 if (foasize < fobsize)
5550 return -1;
5551 else if (foasize > fobsize)
5552 return 1;
5553 return 0;
5554 }
5555
5556 /* Sort a fieldstack according to the field offset and sizes. */
5557 static void
5558 sort_fieldstack (vec<fieldoff_s> fieldstack)
5559 {
5560 fieldstack.qsort (fieldoff_compare);
5561 }
5562
5563 /* Return true if T is a type that can have subvars. */
5564
5565 static inline bool
5566 type_can_have_subvars (const_tree t)
5567 {
5568 /* Aggregates without overlapping fields can have subvars. */
5569 return TREE_CODE (t) == RECORD_TYPE;
5570 }
5571
5572 /* Return true if V is a tree that we can have subvars for.
5573 Normally, this is any aggregate type. Also complex
5574 types which are not gimple registers can have subvars. */
5575
5576 static inline bool
5577 var_can_have_subvars (const_tree v)
5578 {
5579 /* Volatile variables should never have subvars. */
5580 if (TREE_THIS_VOLATILE (v))
5581 return false;
5582
5583 /* Non decls or memory tags can never have subvars. */
5584 if (!DECL_P (v))
5585 return false;
5586
5587 return type_can_have_subvars (TREE_TYPE (v));
5588 }
5589
5590 /* Return true if T is a type that must contain pointers. */
5591
5592 static bool
5593 type_must_have_pointers (tree type)
5594 {
5595 if (POINTER_TYPE_P (type))
5596 return true;
5597
5598 if (TREE_CODE (type) == ARRAY_TYPE)
5599 return type_must_have_pointers (TREE_TYPE (type));
5600
5601 /* A function or method can have pointers as arguments, so track
5602 those separately. */
5603 if (TREE_CODE (type) == FUNCTION_TYPE
5604 || TREE_CODE (type) == METHOD_TYPE)
5605 return true;
5606
5607 return false;
5608 }
5609
5610 static bool
5611 field_must_have_pointers (tree t)
5612 {
5613 return type_must_have_pointers (TREE_TYPE (t));
5614 }
5615
5616 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5617 the fields of TYPE onto fieldstack, recording their offsets along
5618 the way.
5619
5620 OFFSET is used to keep track of the offset in this entire
5621 structure, rather than just the immediately containing structure.
5622 Returns false if the caller is supposed to handle the field we
5623 recursed for. */
5624
5625 static bool
5626 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5627 HOST_WIDE_INT offset)
5628 {
5629 tree field;
5630 bool empty_p = true;
5631
5632 if (TREE_CODE (type) != RECORD_TYPE)
5633 return false;
5634
5635 /* If the vector of fields is growing too big, bail out early.
5636 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
5637 make sure this check fails in that case. */
5638 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5639 return false;
5640
5641 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5642 if (TREE_CODE (field) == FIELD_DECL)
5643 {
5644 bool push = false;
5645 HOST_WIDE_INT foff = bitpos_of_field (field);
5646 tree field_type = TREE_TYPE (field);
5647
5648 if (!var_can_have_subvars (field)
5649 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5650 || TREE_CODE (field_type) == UNION_TYPE)
5651 push = true;
5652 else if (!push_fields_onto_fieldstack
5653 (field_type, fieldstack, offset + foff)
5654 && (DECL_SIZE (field)
5655 && !integer_zerop (DECL_SIZE (field))))
5656 /* Empty structures may have actual size, like in C++. So
5657 if we didn't push any subfields and the size is
5658 nonzero, push the field itself onto the stack. */
5659 push = true;
5660
5661 if (push)
5662 {
5663 fieldoff_s *pair = NULL;
5664 bool has_unknown_size = false;
5665 bool must_have_pointers_p;
5666
5667 if (!fieldstack->is_empty ())
5668 pair = &fieldstack->last ();
5669
5670 /* If there isn't anything at offset zero, create something. */
5671 if (!pair
5672 && offset + foff != 0)
5673 {
5674 fieldoff_s e
5675 = {0, offset + foff, false, false, true, false, NULL_TREE};
5676 pair = fieldstack->safe_push (e);
5677 }
5678
5679 if (!DECL_SIZE (field)
5680 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5681 has_unknown_size = true;
5682
5683 /* If adjacent fields do not contain pointers, merge them. */
5684 must_have_pointers_p = field_must_have_pointers (field);
5685 if (pair
5686 && !has_unknown_size
5687 && !must_have_pointers_p
5688 && !pair->must_have_pointers
5689 && !pair->has_unknown_size
5690 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5691 {
5692 pair->size += tree_to_uhwi (DECL_SIZE (field));
5693 }
5694 else
5695 {
5696 fieldoff_s e;
5697 e.offset = offset + foff;
5698 e.has_unknown_size = has_unknown_size;
5699 if (!has_unknown_size)
5700 e.size = tree_to_uhwi (DECL_SIZE (field));
5701 else
5702 e.size = -1;
5703 e.must_have_pointers = must_have_pointers_p;
5704 e.may_have_pointers = true;
5705 e.only_restrict_pointers
5706 = (!has_unknown_size
5707 && POINTER_TYPE_P (field_type)
5708 && TYPE_RESTRICT (field_type));
5709 if (e.only_restrict_pointers)
5710 e.restrict_pointed_type = TREE_TYPE (field_type);
5711 fieldstack->safe_push (e);
5712 }
5713 }
5714
5715 empty_p = false;
5716 }
5717
5718 return !empty_p;
5719 }
5720
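/* As an illustration of the merging done above, assume 32-bit int,
   64-bit pointers, and a hypothetical

     struct { int a; int b; char *p; } s;

   The adjacent pointer-free fields a and b are merged into one
   fieldoff entry (offset 0, size 64), while p gets its own entry
   (offset 64, size 64) with must_have_pointers set.  */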
5721 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5722 if it is a varargs function. */
5723
5724 static unsigned int
5725 count_num_arguments (tree decl, bool *is_varargs)
5726 {
5727 unsigned int num = 0;
5728 tree t;
5729
5730 /* Capture named arguments for K&R functions. They do not
5731 have a prototype and thus no TYPE_ARG_TYPES. */
5732 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5733 ++num;
5734
5735 /* Check if the function has variadic arguments. */
5736 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5737 if (TREE_VALUE (t) == void_type_node)
5738 break;
5739 if (!t)
5740 *is_varargs = true;
5741
5742 return num;
5743 }
5744
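/* For example, for a hypothetical 'int f (int a, char *p, ...)' the
   DECL_ARGUMENTS walk below counts the two named parameters and the
   TYPE_ARG_TYPES walk runs off the end without seeing void_type_node,
   so *IS_VARARGS is set.  For 'int g (void)' the walk stops at
   void_type_node and the flag is left untouched.  */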
5745 /* Create the function info node for DECL, using NAME, and return the
5746 varinfo we've created for the function. If NONLOCAL_P, create
5747 initial constraints. */
5748
5749 static varinfo_t
5750 create_function_info_for (tree decl, const char *name, bool add_id,
5751 bool nonlocal_p)
5752 {
5753 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5754 varinfo_t vi, prev_vi;
5755 tree arg;
5756 unsigned int i;
5757 bool is_varargs = false;
5758 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5759
5760 /* Create the variable info. */
5761
5762 vi = new_var_info (decl, name, add_id);
5763 vi->offset = 0;
5764 vi->size = 1;
5765 vi->fullsize = fi_parm_base + num_args;
5766 vi->is_fn_info = 1;
5767 vi->may_have_pointers = false;
5768 if (is_varargs)
5769 vi->fullsize = ~0;
5770 insert_vi_for_tree (vi->decl, vi);
5771
5772 prev_vi = vi;
5773
5774 /* Create a variable for things the function clobbers and one for
5775 things the function uses. */
5776 {
5777 varinfo_t clobbervi, usevi;
5778 const char *newname;
5779 char *tempname;
5780
5781 tempname = xasprintf ("%s.clobber", name);
5782 newname = ggc_strdup (tempname);
5783 free (tempname);
5784
5785 clobbervi = new_var_info (NULL, newname, false);
5786 clobbervi->offset = fi_clobbers;
5787 clobbervi->size = 1;
5788 clobbervi->fullsize = vi->fullsize;
5789 clobbervi->is_full_var = true;
5790 clobbervi->is_global_var = false;
5791 clobbervi->is_reg_var = true;
5792
5793 gcc_assert (prev_vi->offset < clobbervi->offset);
5794 prev_vi->next = clobbervi->id;
5795 prev_vi = clobbervi;
5796
5797 tempname = xasprintf ("%s.use", name);
5798 newname = ggc_strdup (tempname);
5799 free (tempname);
5800
5801 usevi = new_var_info (NULL, newname, false);
5802 usevi->offset = fi_uses;
5803 usevi->size = 1;
5804 usevi->fullsize = vi->fullsize;
5805 usevi->is_full_var = true;
5806 usevi->is_global_var = false;
5807 usevi->is_reg_var = true;
5808
5809 gcc_assert (prev_vi->offset < usevi->offset);
5810 prev_vi->next = usevi->id;
5811 prev_vi = usevi;
5812 }
5813
5814 /* And one for the static chain. */
5815 if (fn->static_chain_decl != NULL_TREE)
5816 {
5817 varinfo_t chainvi;
5818 const char *newname;
5819 char *tempname;
5820
5821 tempname = xasprintf ("%s.chain", name);
5822 newname = ggc_strdup (tempname);
5823 free (tempname);
5824
5825 chainvi = new_var_info (fn->static_chain_decl, newname, false);
5826 chainvi->offset = fi_static_chain;
5827 chainvi->size = 1;
5828 chainvi->fullsize = vi->fullsize;
5829 chainvi->is_full_var = true;
5830 chainvi->is_global_var = false;
5831
5832 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5833
5834 if (nonlocal_p
5835 && chainvi->may_have_pointers)
5836 make_constraint_from (chainvi, nonlocal_id);
5837
5838 gcc_assert (prev_vi->offset < chainvi->offset);
5839 prev_vi->next = chainvi->id;
5840 prev_vi = chainvi;
5841 }
5842
5843 /* Create a variable for the return var. */
5844 if (DECL_RESULT (decl) != NULL
5845 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5846 {
5847 varinfo_t resultvi;
5848 const char *newname;
5849 char *tempname;
5850 tree resultdecl = decl;
5851
5852 if (DECL_RESULT (decl))
5853 resultdecl = DECL_RESULT (decl);
5854
5855 tempname = xasprintf ("%s.result", name);
5856 newname = ggc_strdup (tempname);
5857 free (tempname);
5858
5859 resultvi = new_var_info (resultdecl, newname, false);
5860 resultvi->offset = fi_result;
5861 resultvi->size = 1;
5862 resultvi->fullsize = vi->fullsize;
5863 resultvi->is_full_var = true;
5864 if (DECL_RESULT (decl))
5865 resultvi->may_have_pointers = true;
5866
5867 if (DECL_RESULT (decl))
5868 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5869
5870 if (nonlocal_p
5871 && DECL_RESULT (decl)
5872 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
5873 make_constraint_from (resultvi, nonlocal_id);
5874
5875 gcc_assert (prev_vi->offset < resultvi->offset);
5876 prev_vi->next = resultvi->id;
5877 prev_vi = resultvi;
5878 }
5879
5880 /* We also need to make function return values escape. Nothing
5881 escapes by returning from main though. */
5882 if (nonlocal_p
5883 && !MAIN_NAME_P (DECL_NAME (decl)))
5884 {
5885 varinfo_t fi, rvi;
5886 fi = lookup_vi_for_tree (decl);
5887 rvi = first_vi_for_offset (fi, fi_result);
5888 if (rvi && rvi->offset == fi_result)
5889 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
5890 }
5891
5892 /* Set up variables for each argument. */
5893 arg = DECL_ARGUMENTS (decl);
5894 for (i = 0; i < num_args; i++)
5895 {
5896 varinfo_t argvi;
5897 const char *newname;
5898 char *tempname;
5899 tree argdecl = decl;
5900
5901 if (arg)
5902 argdecl = arg;
5903
5904 tempname = xasprintf ("%s.arg%d", name, i);
5905 newname = ggc_strdup (tempname);
5906 free (tempname);
5907
5908 argvi = new_var_info (argdecl, newname, false);
5909 argvi->offset = fi_parm_base + i;
5910 argvi->size = 1;
5911 argvi->is_full_var = true;
5912 argvi->fullsize = vi->fullsize;
5913 if (arg)
5914 argvi->may_have_pointers = true;
5915
5916 if (arg)
5917 insert_vi_for_tree (arg, argvi);
5918
5919 if (nonlocal_p
5920 && argvi->may_have_pointers)
5921 make_constraint_from (argvi, nonlocal_id);
5922
5923 gcc_assert (prev_vi->offset < argvi->offset);
5924 prev_vi->next = argvi->id;
5925 prev_vi = argvi;
5926 if (arg)
5927 arg = DECL_CHAIN (arg);
5928 }
5929
5930 /* Add one representative for all further args. */
5931 if (is_varargs)
5932 {
5933 varinfo_t argvi;
5934 const char *newname;
5935 char *tempname;
5936 tree decl;
5937
5938 tempname = xasprintf ("%s.varargs", name);
5939 newname = ggc_strdup (tempname);
5940 free (tempname);
5941
5942 /* We need something that can be pointed to for va_start. */
5943 decl = build_fake_var_decl (ptr_type_node);
5944
5945 argvi = new_var_info (decl, newname, false);
5946 argvi->offset = fi_parm_base + num_args;
5947 argvi->size = ~0;
5948 argvi->is_full_var = true;
5949 argvi->is_heap_var = true;
5950 argvi->fullsize = vi->fullsize;
5951
5952 if (nonlocal_p
5953 && argvi->may_have_pointers)
5954 make_constraint_from (argvi, nonlocal_id);
5955
5956 gcc_assert (prev_vi->offset < argvi->offset);
5957 prev_vi->next = argvi->id;
5958 prev_vi = argvi;
5959 }
5960
5961 return vi;
5962 }
5963
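/* The function info built above is a chain of sub-variables at
   artificial offsets; for a hypothetical 'int *f (int *p, int *q)'
   the chain is

     f.clobber  at offset fi_clobbers
     f.use      at offset fi_uses
     f.result   at offset fi_result
     f.arg0     at offset fi_parm_base
     f.arg1     at offset fi_parm_base + 1

   with an 'f.chain' sub-variable inserted between uses and result
   when a static chain exists and an 'f.varargs' representative
   appended for variadic functions.  */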
5964
5965 /* Return true if FIELDSTACK contains fields that overlap.
5966 FIELDSTACK is assumed to be sorted by offset. */
5967
5968 static bool
5969 check_for_overlaps (vec<fieldoff_s> fieldstack)
5970 {
5971 fieldoff_s *fo = NULL;
5972 unsigned int i;
5973 HOST_WIDE_INT lastoffset = -1;
5974
5975 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5976 {
5977 if (fo->offset == lastoffset)
5978 return true;
5979 lastoffset = fo->offset;
5980 }
5981 return false;
5982 }
5983
5984 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5985 This will also create any varinfo structures necessary for fields
5986 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
5987 HANDLED_STRUCT_TYPE is used to register struct types reached by following
5988 restrict pointers. This is needed to prevent infinite recursion.
5989 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
5990 does not advertise it. */
5991
5992 static varinfo_t
5993 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
5994 bool handle_param, bitmap handled_struct_type,
5995 bool add_restrict = false)
5996 {
5997 varinfo_t vi, newvi;
5998 tree decl_type = TREE_TYPE (decl);
5999 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
6000 auto_vec<fieldoff_s> fieldstack;
6001 fieldoff_s *fo;
6002 unsigned int i;
6003
6004 if (!declsize
6005 || !tree_fits_uhwi_p (declsize))
6006 {
6007 vi = new_var_info (decl, name, add_id);
6008 vi->offset = 0;
6009 vi->size = ~0;
6010 vi->fullsize = ~0;
6011 vi->is_unknown_size_var = true;
6012 vi->is_full_var = true;
6013 vi->may_have_pointers = true;
6014 return vi;
6015 }
6016
6017 /* Collect field information. */
6018 if (use_field_sensitive
6019 && var_can_have_subvars (decl)
6020 /* ??? Force us to not use subfields for globals in IPA mode.
6021 Else we'd have to parse arbitrary initializers. */
6022 && !(in_ipa_mode
6023 && is_global_var (decl)))
6024 {
6025 fieldoff_s *fo = NULL;
6026 bool notokay = false;
6027 unsigned int i;
6028
6029 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6030
6031 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6032 if (fo->has_unknown_size
6033 || fo->offset < 0)
6034 {
6035 notokay = true;
6036 break;
6037 }
6038
6039 /* We can't sort them if we have a field with a variable sized type,
6040 which will make notokay = true. In that case, we are going to return
6041 without creating varinfos for the fields anyway, so sorting them is a
6042 waste to boot. */
6043 if (!notokay)
6044 {
6045 sort_fieldstack (fieldstack);
6046 /* Due to some C++ FE issues, like PR 22488, we might end up
6047 with what appear to be overlapping fields even though they,
6048 in reality, do not overlap. Until the C++ FE is fixed,
6049 we will simply disable field-sensitivity for these cases. */
6050 notokay = check_for_overlaps (fieldstack);
6051 }
6052
6053 if (notokay)
6054 fieldstack.release ();
6055 }
6056
6057 /* If we didn't end up collecting sub-variables, create a full
6058 variable for the decl. */
6059 if (fieldstack.length () == 0
6060 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
6061 {
6062 vi = new_var_info (decl, name, add_id);
6063 vi->offset = 0;
6064 vi->may_have_pointers = true;
6065 vi->fullsize = tree_to_uhwi (declsize);
6066 vi->size = vi->fullsize;
6067 vi->is_full_var = true;
6068 if (POINTER_TYPE_P (decl_type)
6069 && (TYPE_RESTRICT (decl_type) || add_restrict))
6070 vi->only_restrict_pointers = 1;
6071 if (vi->only_restrict_pointers
6072 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6073 && handle_param
6074 && !bitmap_bit_p (handled_struct_type,
6075 TYPE_UID (TREE_TYPE (decl_type))))
6076 {
6077 varinfo_t rvi;
6078 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6079 DECL_EXTERNAL (heapvar) = 1;
6080 if (var_can_have_subvars (heapvar))
6081 bitmap_set_bit (handled_struct_type,
6082 TYPE_UID (TREE_TYPE (decl_type)));
6083 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6084 true, handled_struct_type);
6085 if (var_can_have_subvars (heapvar))
6086 bitmap_clear_bit (handled_struct_type,
6087 TYPE_UID (TREE_TYPE (decl_type)));
6088 rvi->is_restrict_var = 1;
6089 insert_vi_for_tree (heapvar, rvi);
6090 make_constraint_from (vi, rvi->id);
6091 make_param_constraints (rvi);
6092 }
6093 fieldstack.release ();
6094 return vi;
6095 }
6096
6097 vi = new_var_info (decl, name, add_id);
6098 vi->fullsize = tree_to_uhwi (declsize);
6099 if (fieldstack.length () == 1)
6100 vi->is_full_var = true;
6101 for (i = 0, newvi = vi;
6102 fieldstack.iterate (i, &fo);
6103 ++i, newvi = vi_next (newvi))
6104 {
6105 const char *newname = NULL;
6106 char *tempname;
6107
6108 if (dump_file)
6109 {
6110 if (fieldstack.length () != 1)
6111 {
6112 tempname
6113 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6114 "+" HOST_WIDE_INT_PRINT_DEC, name,
6115 fo->offset, fo->size);
6116 newname = ggc_strdup (tempname);
6117 free (tempname);
6118 }
6119 }
6120 else
6121 newname = "NULL";
6122
6123 if (newname)
6124 newvi->name = newname;
6125 newvi->offset = fo->offset;
6126 newvi->size = fo->size;
6127 newvi->fullsize = vi->fullsize;
6128 newvi->may_have_pointers = fo->may_have_pointers;
6129 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6130 if (handle_param
6131 && newvi->only_restrict_pointers
6132 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6133 && !bitmap_bit_p (handled_struct_type,
6134 TYPE_UID (fo->restrict_pointed_type)))
6135 {
6136 varinfo_t rvi;
6137 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6138 DECL_EXTERNAL (heapvar) = 1;
6139 if (var_can_have_subvars (heapvar))
6140 bitmap_set_bit (handled_struct_type,
6141 TYPE_UID (fo->restrict_pointed_type));
6142 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6143 true, handled_struct_type);
6144 if (var_can_have_subvars (heapvar))
6145 bitmap_clear_bit (handled_struct_type,
6146 TYPE_UID (fo->restrict_pointed_type));
6147 rvi->is_restrict_var = 1;
6148 insert_vi_for_tree (heapvar, rvi);
6149 make_constraint_from (newvi, rvi->id);
6150 make_param_constraints (rvi);
6151 }
6152 if (i + 1 < fieldstack.length ())
6153 {
6154 varinfo_t tem = new_var_info (decl, name, false);
6155 newvi->next = tem->id;
6156 tem->head = vi->id;
6157 }
6158 }
6159
6160 return vi;
6161 }
6162
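/* To illustrate the restrict handling above, a hypothetical parameter
   'int * restrict p' processed with HANDLE_PARAM set gets a fake heap
   variable and, in constraint notation, roughly

     p = &PARM_NOALIAS
     PARM_NOALIAS = &NONLOCAL

   so p points to a distinct restrict tag object that no other
   incoming pointer is considered to alias.  */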
6163 static unsigned int
6164 create_variable_info_for (tree decl, const char *name, bool add_id)
6165 {
6166 /* First see if we are dealing with an ifunc resolver call and
6167 associate that with a call to the resolver function result. */
6168 cgraph_node *node;
6169 if (in_ipa_mode
6170 && TREE_CODE (decl) == FUNCTION_DECL
6171 && (node = cgraph_node::get (decl))
6172 && node->ifunc_resolver)
6173 {
6174 varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6175 constraint_expr rhs
6176 = get_function_part_constraint (fi, fi_result);
6177 fi = new_var_info (NULL_TREE, "ifuncres", true);
6178 fi->is_reg_var = true;
6179 constraint_expr lhs;
6180 lhs.type = SCALAR;
6181 lhs.var = fi->id;
6182 lhs.offset = 0;
6183 process_constraint (new_constraint (lhs, rhs));
6184 insert_vi_for_tree (decl, fi);
6185 return fi->id;
6186 }
6187
6188 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6189 unsigned int id = vi->id;
6190
6191 insert_vi_for_tree (decl, vi);
6192
6193 if (!VAR_P (decl))
6194 return id;
6195
6196 /* Create initial constraints for globals. */
6197 for (; vi; vi = vi_next (vi))
6198 {
6199 if (!vi->may_have_pointers
6200 || !vi->is_global_var)
6201 continue;
6202
6203 /* Mark global restrict qualified pointers. */
6204 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6205 && TYPE_RESTRICT (TREE_TYPE (decl)))
6206 || vi->only_restrict_pointers)
6207 {
6208 varinfo_t rvi
6209 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6210 true);
6211 /* ??? For now exclude reads from globals as restrict sources
6212 if those are not (indirectly) from incoming parameters. */
6213 rvi->is_restrict_var = false;
6214 continue;
6215 }
6216
6217 /* In non-IPA mode (and for hard registers) the initializer from nonlocal is all we need. */
6218 if (!in_ipa_mode
6219 || DECL_HARD_REGISTER (decl))
6220 make_copy_constraint (vi, nonlocal_id);
6221
6222 /* In IPA mode parse the initializer and generate proper constraints
6223 for it. */
6224 else
6225 {
6226 varpool_node *vnode = varpool_node::get (decl);
6227
6228 /* For escaped variables initialize them from nonlocal. */
6229 if (!vnode->all_refs_explicit_p ())
6230 make_copy_constraint (vi, nonlocal_id);
6231
6232 /* If this is a global variable with an initializer and we are in
6233 IPA mode generate constraints for it. */
6234 ipa_ref *ref;
6235 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6236 {
6237 auto_vec<ce_s> rhsc;
6238 struct constraint_expr lhs, *rhsp;
6239 unsigned i;
6240 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6241 lhs.var = vi->id;
6242 lhs.offset = 0;
6243 lhs.type = SCALAR;
6244 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6245 process_constraint (new_constraint (lhs, *rhsp));
6246 /* If this is a variable that escapes from the unit
6247 the initializer escapes as well. */
6248 if (!vnode->all_refs_explicit_p ())
6249 {
6250 lhs.var = escaped_id;
6251 lhs.offset = 0;
6252 lhs.type = SCALAR;
6253 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6254 process_constraint (new_constraint (lhs, *rhsp));
6255 }
6256 }
6257 }
6258 }
6259
6260 return id;
6261 }
6262
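/* For instance, given hypothetical globals

     int x;
     int *gp = &x;

   non-IPA mode simply records 'gp = NONLOCAL', while IPA mode parses
   the initializer into 'gp = &x' and, if gp may be referenced from
   outside the unit, also 'ESCAPED = &x' so the initializer escapes
   together with the variable.  */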
6263 /* Print out the points-to solution for VAR to FILE. */
6264
6265 static void
6266 dump_solution_for_var (FILE *file, unsigned int var)
6267 {
6268 varinfo_t vi = get_varinfo (var);
6269 unsigned int i;
6270 bitmap_iterator bi;
6271
6272 /* Dump the solution for unified vars anyway; this avoids difficulties
6273 in scanning dumps in the testsuite. */
6274 fprintf (file, "%s = { ", vi->name);
6275 vi = get_varinfo (find (var));
6276 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6277 fprintf (file, "%s ", get_varinfo (i)->name);
6278 fprintf (file, "}");
6279
6280 /* But note when the variable was unified. */
6281 if (vi->id != var)
6282 fprintf (file, " same as %s", vi->name);
6283
6284 fprintf (file, "\n");
6285 }
6286
6287 /* Print the points-to solution for VAR to stderr. */
6288
6289 DEBUG_FUNCTION void
6290 debug_solution_for_var (unsigned int var)
6291 {
6292 dump_solution_for_var (stderr, var);
6293 }
6294
6295 /* Register the constraints for function parameter related VI. */
6296
6297 static void
6298 make_param_constraints (varinfo_t vi)
6299 {
6300 for (; vi; vi = vi_next (vi))
6301 {
6302 if (vi->only_restrict_pointers)
6303 ;
6304 else if (vi->may_have_pointers)
6305 make_constraint_from (vi, nonlocal_id);
6306
6307 if (vi->is_full_var)
6308 break;
6309 }
6310 }
6311
6312 /* Create varinfo structures for all of the variables in the
6313 function for intraprocedural mode. */
6314
6315 static void
6316 intra_create_variable_infos (struct function *fn)
6317 {
6318 tree t;
6319 bitmap handled_struct_type = NULL;
6320 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6321
6322 /* For each incoming pointer argument arg, create the constraint ARG
6323 = NONLOCAL or a dummy variable if it is a restrict qualified
6324 passed-by-reference argument. */
6325 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6326 {
6327 if (handled_struct_type == NULL)
6328 handled_struct_type = BITMAP_ALLOC (NULL);
6329
6330 varinfo_t p
6331 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6332 handled_struct_type, this_parm_in_ctor);
6333 insert_vi_for_tree (t, p);
6334
6335 make_param_constraints (p);
6336
6337 this_parm_in_ctor = false;
6338 }
6339
6340 if (handled_struct_type != NULL)
6341 BITMAP_FREE (handled_struct_type);
6342
6343 /* Add a constraint for a result decl that is passed by reference. */
6344 if (DECL_RESULT (fn->decl)
6345 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6346 {
6347 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6348
6349 for (p = result_vi; p; p = vi_next (p))
6350 make_constraint_from (p, nonlocal_id);
6351 }
6352
6353 /* Add a constraint for the incoming static chain parameter. */
6354 if (fn->static_chain_decl != NULL_TREE)
6355 {
6356 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6357
6358 for (p = chain_vi; p; p = vi_next (p))
6359 make_constraint_from (p, nonlocal_id);
6360 }
6361 }
6362
6363 /* Structure used to put solution bitmaps in a hashtable so they can
6364 be shared among variables with the same points-to set. */
6365
6366 typedef struct shared_bitmap_info
6367 {
6368 bitmap pt_vars;
6369 hashval_t hashcode;
6370 } *shared_bitmap_info_t;
6371 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6372
6373 /* Shared_bitmap hashtable helpers. */
6374
6375 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6376 {
6377 static inline hashval_t hash (const shared_bitmap_info *);
6378 static inline bool equal (const shared_bitmap_info *,
6379 const shared_bitmap_info *);
6380 };
6381
6382 /* Hash function for a shared_bitmap_info_t */
6383
6384 inline hashval_t
6385 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6386 {
6387 return bi->hashcode;
6388 }
6389
6390 /* Equality function for two shared_bitmap_info_t's. */
6391
6392 inline bool
6393 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6394 const shared_bitmap_info *sbi2)
6395 {
6396 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6397 }
6398
6399 /* Shared_bitmap hashtable. */
6400
6401 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6402
6403 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6404 existing instance if there is one, NULL otherwise. */
6405
6406 static bitmap
6407 shared_bitmap_lookup (bitmap pt_vars)
6408 {
6409 shared_bitmap_info **slot;
6410 struct shared_bitmap_info sbi;
6411
6412 sbi.pt_vars = pt_vars;
6413 sbi.hashcode = bitmap_hash (pt_vars);
6414
6415 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6416 if (!slot)
6417 return NULL;
6418 else
6419 return (*slot)->pt_vars;
6420 }
6421
6422
6423 /* Add a bitmap to the shared bitmap hashtable. */
6424
6425 static void
6426 shared_bitmap_add (bitmap pt_vars)
6427 {
6428 shared_bitmap_info **slot;
6429 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6430
6431 sbi->pt_vars = pt_vars;
6432 sbi->hashcode = bitmap_hash (pt_vars);
6433
6434 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6435 gcc_assert (!*slot);
6436 *slot = sbi;
6437 }
6438
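/* The intended usage pattern, as in find_what_var_points_to below, is
   to populate a fresh bitmap, try shared_bitmap_lookup on it, and
   either reuse an existing equal bitmap (discarding the fresh one) or
   register the fresh bitmap via shared_bitmap_add so later identical
   points-to sets can share it.  */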
6439
6440 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6441
6442 static void
6443 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6444 tree fndecl)
6445 {
6446 unsigned int i;
6447 bitmap_iterator bi;
6448 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6449 bool everything_escaped
6450 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6451
6452 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6453 {
6454 varinfo_t vi = get_varinfo (i);
6455
6456 if (vi->is_artificial_var)
6457 continue;
6458
6459 if (everything_escaped
6460 || (escaped_vi->solution
6461 && bitmap_bit_p (escaped_vi->solution, i)))
6462 {
6463 pt->vars_contains_escaped = true;
6464 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6465 }
6466
6467 if (vi->is_restrict_var)
6468 pt->vars_contains_restrict = true;
6469
6470 if (VAR_P (vi->decl)
6471 || TREE_CODE (vi->decl) == PARM_DECL
6472 || TREE_CODE (vi->decl) == RESULT_DECL)
6473 {
6474 /* If we are in IPA mode we will not recompute points-to
6475 sets after inlining so make sure they stay valid. */
6476 if (in_ipa_mode
6477 && !DECL_PT_UID_SET_P (vi->decl))
6478 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6479
6480 /* Add the decl to the points-to set. Note that the points-to
6481 set contains global variables. */
6482 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6483 if (vi->is_global_var
6484 /* In IPA mode the escaped_heap trick doesn't work as
6485 ESCAPED is escaped from the unit but
6486 pt_solution_includes_global needs to answer true for
6487 all variables not automatic within a function.
6488 For the same reason is_global_var is not the
6489 correct flag to track - local variables from other
6490 functions also need to be considered global.
6491 Conveniently all HEAP vars are not put in function
6492 scope. */
6493 || (in_ipa_mode
6494 && fndecl
6495 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6496 pt->vars_contains_nonlocal = true;
6497
6498 /* If we have a variable that is interposable record that fact
6499 for pointer comparison simplification. */
6500 if (VAR_P (vi->decl)
6501 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6502 && ! decl_binds_to_current_def_p (vi->decl))
6503 pt->vars_contains_interposable = true;
6504
6505 /* If this is a local variable, we can have overlapping lifetimes
6506 of different function invocations through recursion; duplicate
6507 it with its shadow variable. */
6508 if (in_ipa_mode
6509 && vi->shadow_var_uid != 0)
6510 {
6511 bitmap_set_bit (into, vi->shadow_var_uid);
6512 pt->vars_contains_nonlocal = true;
6513 }
6514 }
6515
6516 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6517 || TREE_CODE (vi->decl) == LABEL_DECL)
6518 {
6519 /* Nothing should read/write from/to code so we can
6520 save bits by not including them in the points-to bitmaps.
6521 Still mark the points-to set as containing global memory
6522 to make code-patching possible - see PR70128. */
6523 pt->vars_contains_nonlocal = true;
6524 }
6525 }
6526 }
6527
6528
6529 /* Compute the points-to solution *PT for the variable VI. */
6530
6531 static struct pt_solution
6532 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6533 {
6534 unsigned int i;
6535 bitmap_iterator bi;
6536 bitmap finished_solution;
6537 bitmap result;
6538 varinfo_t vi;
6539 struct pt_solution *pt;
6540
6541 /* This variable may have been collapsed; let's get the real
6542 variable. */
6543 vi = get_varinfo (find (orig_vi->id));
6544
6545 /* See if we have already computed the solution and return it. */
6546 pt_solution **slot = &final_solutions->get_or_insert (vi);
6547 if (*slot != NULL)
6548 return **slot;
6549
6550 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6551 memset (pt, 0, sizeof (struct pt_solution));
6552
6553 /* Translate artificial variables into SSA_NAME_PTR_INFO
6554 attributes. */
6555 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6556 {
6557 varinfo_t vi = get_varinfo (i);
6558
6559 if (vi->is_artificial_var)
6560 {
6561 if (vi->id == nothing_id)
6562 pt->null = 1;
6563 else if (vi->id == escaped_id)
6564 {
6565 if (in_ipa_mode)
6566 pt->ipa_escaped = 1;
6567 else
6568 pt->escaped = 1;
6569 /* Expand some special vars of ESCAPED in-place here. */
6570 varinfo_t evi = get_varinfo (find (escaped_id));
6571 if (bitmap_bit_p (evi->solution, nonlocal_id))
6572 pt->nonlocal = 1;
6573 }
6574 else if (vi->id == nonlocal_id)
6575 pt->nonlocal = 1;
6576 else if (vi->id == string_id)
6577 /* Nobody cares - STRING_CSTs are read-only entities. */
6578 ;
6579 else if (vi->id == anything_id
6580 || vi->id == integer_id)
6581 pt->anything = 1;
6582 }
6583 }
6584
6585 /* Instead of doing extra work, simply do not create
6586 elaborate points-to information for pt_anything pointers. */
6587 if (pt->anything)
6588 return *pt;
6589
6590 /* Share the final set of variables when possible. */
6591 finished_solution = BITMAP_GGC_ALLOC ();
6592 stats.points_to_sets_created++;
6593
6594 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6595 result = shared_bitmap_lookup (finished_solution);
6596 if (!result)
6597 {
6598 shared_bitmap_add (finished_solution);
6599 pt->vars = finished_solution;
6600 }
6601 else
6602 {
6603 pt->vars = result;
6604 bitmap_clear (finished_solution);
6605 }
6606
6607 return *pt;
6608 }
6609
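/* As an example of the translation above, a hypothetical solver
   solution { NONLOCAL, ESCAPED, x, y } becomes a pt_solution with
   pt->nonlocal and pt->escaped set and pt->vars holding the
   DECL_PT_UIDs of x and y, while any solution containing ANYTHING
   short-circuits to pt->anything with no vars bitmap at all.  */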
6610 /* Given a pointer variable P, fill in its points-to set. */
6611
6612 static void
6613 find_what_p_points_to (tree fndecl, tree p)
6614 {
6615 struct ptr_info_def *pi;
6616 tree lookup_p = p;
6617 varinfo_t vi;
6618 bool nonnull = get_ptr_nonnull (p);
6619
6620 /* For parameters, get at the points-to set for the actual parm
6621 decl. */
6622 if (TREE_CODE (p) == SSA_NAME
6623 && SSA_NAME_IS_DEFAULT_DEF (p)
6624 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6625 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6626 lookup_p = SSA_NAME_VAR (p);
6627
6628 vi = lookup_vi_for_tree (lookup_p);
6629 if (!vi)
6630 return;
6631
6632 pi = get_ptr_info (p);
6633 pi->pt = find_what_var_points_to (fndecl, vi);
6634 /* Conservatively set the points-to-NULL bit from PTA to true. */
6635 pi->pt.null = 1;
6636 /* Preserve pointer nonnull computed by VRP. See get_ptr_nonnull
6637 in gcc/tree-ssanames.c for more information. */
6638 if (nonnull)
6639 set_ptr_nonnull (p);
6640 }
6641
6642
6643 /* Query statistics for points-to solutions. */
6644
6645 static struct {
6646 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6647 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6648 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6649 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6650 } pta_stats;
6651
6652 void
6653 dump_pta_stats (FILE *s)
6654 {
6655 fprintf (s, "\nPTA query stats:\n");
6656 fprintf (s, " pt_solution_includes: "
6657 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6658 HOST_WIDE_INT_PRINT_DEC" queries\n",
6659 pta_stats.pt_solution_includes_no_alias,
6660 pta_stats.pt_solution_includes_no_alias
6661 + pta_stats.pt_solution_includes_may_alias);
6662 fprintf (s, " pt_solutions_intersect: "
6663 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6664 HOST_WIDE_INT_PRINT_DEC" queries\n",
6665 pta_stats.pt_solutions_intersect_no_alias,
6666 pta_stats.pt_solutions_intersect_no_alias
6667 + pta_stats.pt_solutions_intersect_may_alias);
6668 }
6669
6670
6671 /* Reset the points-to solution *PT to a conservative default
6672 (point to anything). */
6673
6674 void
6675 pt_solution_reset (struct pt_solution *pt)
6676 {
6677 memset (pt, 0, sizeof (struct pt_solution));
6678 pt->anything = true;
6679 pt->null = true;
6680 }
6681
6682 /* Set the points-to solution *PT to point only to the variables
6683 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6684 variables that are nonlocal to the current function. */
6686
6687 void
6688 pt_solution_set (struct pt_solution *pt, bitmap vars,
6689 bool vars_contains_nonlocal)
6690 {
6691 memset (pt, 0, sizeof (struct pt_solution));
6692 pt->vars = vars;
6693 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6694 pt->vars_contains_escaped
6695 = (cfun->gimple_df->escaped.anything
6696 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6697 }
6698
6699 /* Set the points-to solution *PT to point only to the variable VAR. */
6700
6701 void
6702 pt_solution_set_var (struct pt_solution *pt, tree var)
6703 {
6704 memset (pt, 0, sizeof (struct pt_solution));
6705 pt->vars = BITMAP_GGC_ALLOC ();
6706 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6707 pt->vars_contains_nonlocal = is_global_var (var);
6708 pt->vars_contains_escaped
6709 = (cfun->gimple_df->escaped.anything
6710 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6711 }
6712
6713 /* Computes the union of the points-to solutions *DEST and *SRC and
6714 stores the result in *DEST. This changes the points-to bitmap
6715 of *DEST and thus may not be used if that might be shared.
6716 The points-to bitmap of *SRC and *DEST will not be shared after
6717 this function if they were not before. */
6718
6719 static void
6720 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6721 {
6722 dest->anything |= src->anything;
6723 if (dest->anything)
6724 {
6725 pt_solution_reset (dest);
6726 return;
6727 }
6728
6729 dest->nonlocal |= src->nonlocal;
6730 dest->escaped |= src->escaped;
6731 dest->ipa_escaped |= src->ipa_escaped;
6732 dest->null |= src->null;
6733 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6734 dest->vars_contains_escaped |= src->vars_contains_escaped;
6735 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6736 if (!src->vars)
6737 return;
6738
6739 if (!dest->vars)
6740 dest->vars = BITMAP_GGC_ALLOC ();
6741 bitmap_ior_into (dest->vars, src->vars);
6742 }
6743
6744 /* Return true if the points-to solution *PT is empty. */
6745
6746 bool
6747 pt_solution_empty_p (struct pt_solution *pt)
6748 {
6749 if (pt->anything
6750 || pt->nonlocal)
6751 return false;
6752
6753 if (pt->vars
6754 && !bitmap_empty_p (pt->vars))
6755 return false;
6756
6757 /* If the solution includes ESCAPED, check if that is empty. */
6758 if (pt->escaped
6759 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6760 return false;
6761
6762 /* Likewise for the IPA-mode ESCAPED solution. */
6763 if (pt->ipa_escaped
6764 && !pt_solution_empty_p (&ipa_escaped_pt))
6765 return false;
6766
6767 return true;
6768 }
6769
6770 /* Return true if the points-to solution *PT points to only a single
6771 variable, and return that variable's uid in *UID. */
6772
6773 bool
6774 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6775 {
6776 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6777 || pt->vars == NULL
6778 || !bitmap_single_bit_set_p (pt->vars))
6779 return false;
6780
6781 *uid = bitmap_first_set_bit (pt->vars);
6782 return true;
6783 }
6784
6785 /* Return true if the points-to solution *PT includes global memory. */
6786
6787 bool
6788 pt_solution_includes_global (struct pt_solution *pt)
6789 {
6790 if (pt->anything
6791 || pt->nonlocal
6792 || pt->vars_contains_nonlocal
6793 /* The following is a hack to make the malloc escape hack work.
6794 In reality we'd need different sets for escaped-through-return
6795 and escaped-to-callees and passes would need to be updated. */
6796 || pt->vars_contains_escaped_heap)
6797 return true;
6798
6799 /* 'escaped' is also a placeholder so we have to look into it. */
6800 if (pt->escaped)
6801 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6802
6803 if (pt->ipa_escaped)
6804 return pt_solution_includes_global (&ipa_escaped_pt);
6805
6806 return false;
6807 }
6808
6809 /* Return true if the points-to solution *PT includes the variable
6810 declaration DECL. */
6811
6812 static bool
6813 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6814 {
6815 if (pt->anything)
6816 return true;
6817
6818 if (pt->nonlocal
6819 && is_global_var (decl))
6820 return true;
6821
6822 if (pt->vars
6823 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6824 return true;
6825
6826 /* If the solution includes ESCAPED, check it. */
6827 if (pt->escaped
6828 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6829 return true;
6830
6831 /* Likewise for the IPA-mode ESCAPED. */
6832 if (pt->ipa_escaped
6833 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6834 return true;
6835
6836 return false;
6837 }
6838
6839 bool
6840 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6841 {
6842 bool res = pt_solution_includes_1 (pt, decl);
6843 if (res)
6844 ++pta_stats.pt_solution_includes_may_alias;
6845 else
6846 ++pta_stats.pt_solution_includes_no_alias;
6847 return res;
6848 }
6849
6850 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6851 intersection. */
6852
6853 static bool
6854 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6855 {
6856 if (pt1->anything || pt2->anything)
6857 return true;
6858
6859 /* If either points to unknown global memory and the other points to
6860 any global memory they alias. */
6861 if ((pt1->nonlocal
6862 && (pt2->nonlocal
6863 || pt2->vars_contains_nonlocal))
6864 || (pt2->nonlocal
6865 && pt1->vars_contains_nonlocal))
6866 return true;
6867
6868 /* If either points to all escaped memory and the other points to
6869 any escaped memory they alias. */
6870 if ((pt1->escaped
6871 && (pt2->escaped
6872 || pt2->vars_contains_escaped))
6873 || (pt2->escaped
6874 && pt1->vars_contains_escaped))
6875 return true;
6876
6877 /* Check the escaped solution if required.
6878 ??? Do we need to check the local against the IPA escaped sets? */
6879 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6880 && !pt_solution_empty_p (&ipa_escaped_pt))
6881 {
6882 /* If both point to escaped memory and that solution
6883 is not empty they alias. */
6884 if (pt1->ipa_escaped && pt2->ipa_escaped)
6885 return true;
6886
6887 /* If either points to escaped memory see if the escaped solution
6888 intersects with the other. */
6889 if ((pt1->ipa_escaped
6890 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6891 || (pt2->ipa_escaped
6892 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6893 return true;
6894 }
6895
6896 /* Now both pointers alias if their points-to solution intersects. */
6897 return (pt1->vars
6898 && pt2->vars
6899 && bitmap_intersect_p (pt1->vars, pt2->vars));
6900 }
6901
6902 bool
6903 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6904 {
6905 bool res = pt_solutions_intersect_1 (pt1, pt2);
6906 if (res)
6907 ++pta_stats.pt_solutions_intersect_may_alias;
6908 else
6909 ++pta_stats.pt_solutions_intersect_no_alias;
6910 return res;
6911 }
6912
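/* For example, if a hypothetical pointer p has pt->vars = { a } and q
   has pt->vars = { b }, with none of the anything/nonlocal/escaped
   bits set, the bitmaps do not intersect and the query reports no
   alias; if either solution had pt->anything set, the query would
   conservatively report a may-alias.  */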
6913
6914 /* Dump points-to information to OUTFILE. */
6915
6916 static void
6917 dump_sa_points_to_info (FILE *outfile)
6918 {
6919 unsigned int i;
6920
6921 fprintf (outfile, "\nPoints-to sets\n\n");
6922
6923 if (dump_flags & TDF_STATS)
6924 {
6925 fprintf (outfile, "Stats:\n");
6926 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6927 fprintf (outfile, "Non-pointer vars: %d\n",
6928 stats.nonpointer_vars);
6929 fprintf (outfile, "Statically unified vars: %d\n",
6930 stats.unified_vars_static);
6931 fprintf (outfile, "Dynamically unified vars: %d\n",
6932 stats.unified_vars_dynamic);
6933 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6934 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6935 fprintf (outfile, "Number of implicit edges: %d\n",
6936 stats.num_implicit_edges);
6937 }
6938
6939 for (i = 1; i < varmap.length (); i++)
6940 {
6941 varinfo_t vi = get_varinfo (i);
6942 if (!vi->may_have_pointers)
6943 continue;
6944 dump_solution_for_var (outfile, i);
6945 }
6946 }
6947
6948
6949 /* Debug points-to information to stderr. */
6950
6951 DEBUG_FUNCTION void
6952 debug_sa_points_to_info (void)
6953 {
6954 dump_sa_points_to_info (stderr);
6955 }
6956
6957
6958 /* Initialize the always-existing constraint variables NULL,
6959 ANYTHING, STRING, ESCAPED, NONLOCAL, STOREDANYTHING, and INTEGER. */
6960
6961 static void
6962 init_base_vars (void)
6963 {
6964 struct constraint_expr lhs, rhs;
6965 varinfo_t var_anything;
6966 varinfo_t var_nothing;
6967 varinfo_t var_string;
6968 varinfo_t var_escaped;
6969 varinfo_t var_nonlocal;
6970 varinfo_t var_storedanything;
6971 varinfo_t var_integer;
6972
6973 /* Variable ID zero is reserved and should be NULL. */
6974 varmap.safe_push (NULL);
6975
6976 /* Create the NULL variable, used to represent that a variable points
6977 to NULL. */
6978 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6979 gcc_assert (var_nothing->id == nothing_id);
6980 var_nothing->is_artificial_var = 1;
6981 var_nothing->offset = 0;
6982 var_nothing->size = ~0;
6983 var_nothing->fullsize = ~0;
6984 var_nothing->is_special_var = 1;
6985 var_nothing->may_have_pointers = 0;
6986 var_nothing->is_global_var = 0;
6987
6988 /* Create the ANYTHING variable, used to represent that a variable
6989 points to some unknown piece of memory. */
6990 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6991 gcc_assert (var_anything->id == anything_id);
6992 var_anything->is_artificial_var = 1;
6993 var_anything->size = ~0;
6994 var_anything->offset = 0;
6995 var_anything->fullsize = ~0;
6996 var_anything->is_special_var = 1;
6997
6998 /* Anything points to anything. This makes deref constraints just
6999 work in the presence of linked list and other p = *p type loops,
7000 by saying that *ANYTHING = ANYTHING. */
7001 lhs.type = SCALAR;
7002 lhs.var = anything_id;
7003 lhs.offset = 0;
7004 rhs.type = ADDRESSOF;
7005 rhs.var = anything_id;
7006 rhs.offset = 0;
7007
7008 /* This specifically does not use process_constraint because
7009 process_constraint ignores all anything = anything constraints, since all
7010 but this one are redundant. */
7011 constraints.safe_push (new_constraint (lhs, rhs));
7012
7013 /* Create the STRING variable, used to represent that a variable
7014 points to a string literal. String literals don't contain
7015 pointers so STRING doesn't point to anything. */
7016 var_string = new_var_info (NULL_TREE, "STRING", false);
7017 gcc_assert (var_string->id == string_id);
7018 var_string->is_artificial_var = 1;
7019 var_string->offset = 0;
7020 var_string->size = ~0;
7021 var_string->fullsize = ~0;
7022 var_string->is_special_var = 1;
7023 var_string->may_have_pointers = 0;
7024
7025 /* Create the ESCAPED variable, used to represent the set of escaped
7026 memory. */
7027 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7028 gcc_assert (var_escaped->id == escaped_id);
7029 var_escaped->is_artificial_var = 1;
7030 var_escaped->offset = 0;
7031 var_escaped->size = ~0;
7032 var_escaped->fullsize = ~0;
7033 var_escaped->is_special_var = 0;
7034
7035 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7036 memory. */
7037 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7038 gcc_assert (var_nonlocal->id == nonlocal_id);
7039 var_nonlocal->is_artificial_var = 1;
7040 var_nonlocal->offset = 0;
7041 var_nonlocal->size = ~0;
7042 var_nonlocal->fullsize = ~0;
7043 var_nonlocal->is_special_var = 1;
7044
7045 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7046 lhs.type = SCALAR;
7047 lhs.var = escaped_id;
7048 lhs.offset = 0;
7049 rhs.type = DEREF;
7050 rhs.var = escaped_id;
7051 rhs.offset = 0;
7052 process_constraint (new_constraint (lhs, rhs));
7053
7054 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7055 whole variable escapes. */
7056 lhs.type = SCALAR;
7057 lhs.var = escaped_id;
7058 lhs.offset = 0;
7059 rhs.type = SCALAR;
7060 rhs.var = escaped_id;
7061 rhs.offset = UNKNOWN_OFFSET;
7062 process_constraint (new_constraint (lhs, rhs));
7063
7064 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7065 everything pointed to by escaped points to what global memory can
7066 point to. */
7067 lhs.type = DEREF;
7068 lhs.var = escaped_id;
7069 lhs.offset = 0;
7070 rhs.type = SCALAR;
7071 rhs.var = nonlocal_id;
7072 rhs.offset = 0;
7073 process_constraint (new_constraint (lhs, rhs));
7074
7075 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7076 global memory may point to global memory and escaped memory. */
7077 lhs.type = SCALAR;
7078 lhs.var = nonlocal_id;
7079 lhs.offset = 0;
7080 rhs.type = ADDRESSOF;
7081 rhs.var = nonlocal_id;
7082 rhs.offset = 0;
7083 process_constraint (new_constraint (lhs, rhs));
7084 rhs.type = ADDRESSOF;
7085 rhs.var = escaped_id;
7086 rhs.offset = 0;
7087 process_constraint (new_constraint (lhs, rhs));
7088
7089 /* Create the STOREDANYTHING variable, used to represent the set of
7090 variables stored to *ANYTHING. */
7091 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7092 gcc_assert (var_storedanything->id == storedanything_id);
7093 var_storedanything->is_artificial_var = 1;
7094 var_storedanything->offset = 0;
7095 var_storedanything->size = ~0;
7096 var_storedanything->fullsize = ~0;
7097 var_storedanything->is_special_var = 0;
7098
7099 /* Create the INTEGER variable, used to represent that a variable points
7100 to what an INTEGER "points to". */
7101 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7102 gcc_assert (var_integer->id == integer_id);
7103 var_integer->is_artificial_var = 1;
7104 var_integer->size = ~0;
7105 var_integer->fullsize = ~0;
7106 var_integer->offset = 0;
7107 var_integer->is_special_var = 1;
7108
7109 /* INTEGER = ANYTHING, because we don't know where a dereference of
7110 a random integer will point to. */
7111 lhs.type = SCALAR;
7112 lhs.var = integer_id;
7113 lhs.offset = 0;
7114 rhs.type = ADDRESSOF;
7115 rhs.var = anything_id;
7116 rhs.offset = 0;
7117 process_constraint (new_constraint (lhs, rhs));
7118 }
7119
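/* In constraint notation the base variables set up above amount to

     ANYTHING = &ANYTHING
     ESCAPED = *ESCAPED
     ESCAPED = ESCAPED + UNKNOWN
     *ESCAPED = NONLOCAL
     NONLOCAL = &NONLOCAL
     NONLOCAL = &ESCAPED
     INTEGER = &ANYTHING

   while NULL, STRING, and STOREDANYTHING start out with no
   constraints of their own.  */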
7120 /* Initialize things necessary to perform PTA. */
7121
7122 static void
7123 init_alias_vars (void)
7124 {
7125 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
7126
7127 bitmap_obstack_initialize (&pta_obstack);
7128 bitmap_obstack_initialize (&oldpta_obstack);
7129 bitmap_obstack_initialize (&predbitmap_obstack);
7130
7131 constraints.create (8);
7132 varmap.create (8);
7133 vi_for_tree = new hash_map<tree, varinfo_t>;
7134 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
7135
7136 memset (&stats, 0, sizeof (stats));
7137 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
7138 init_base_vars ();
7139
7140 gcc_obstack_init (&fake_var_decl_obstack);
7141
7142 final_solutions = new hash_map<varinfo_t, pt_solution *>;
7143 gcc_obstack_init (&final_solutions_obstack);
7144 }
7145
7146 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7147 predecessor edges. */
7148
7149 static void
7150 remove_preds_and_fake_succs (constraint_graph_t graph)
7151 {
7152 unsigned int i;
7153
7154 /* Clear the implicit ref and address nodes from the successor
7155 lists. */
7156 for (i = 1; i < FIRST_REF_NODE; i++)
7157 {
7158 if (graph->succs[i])
7159 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7160 FIRST_REF_NODE * 2);
7161 }
7162
7163 /* Free the successor list for the non-ref nodes. */
7164 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7165 {
7166 if (graph->succs[i])
7167 BITMAP_FREE (graph->succs[i]);
7168 }
7169
7170 /* Now reallocate the successor list to the current number of
7171 variables and blow away the predecessor bitmaps. */
7172 graph->size = varmap.length ();
7173 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7174
7175 free (graph->implicit_preds);
7176 graph->implicit_preds = NULL;
7177 free (graph->preds);
7178 graph->preds = NULL;
7179 bitmap_obstack_release (&predbitmap_obstack);
7180 }
7181
7182 /* Solve the constraint set. */
7183
7184 static void
7185 solve_constraints (void)
7186 {
7187 struct scc_info *si;
7188
7189 /* Sort varinfos so that ones that cannot be pointed to are last.
7190 This makes bitmaps more efficient. */
7191 unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
7192 for (unsigned i = 0; i < integer_id + 1; ++i)
7193 map[i] = i;
7194 /* Start with non-register vars (as possibly address-taken), followed
7195 by register vars as a conservative set of vars never appearing in
7196 the points-to solution bitmaps. */
7197 unsigned j = integer_id + 1;
7198 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7199 if (! varmap[i]->is_reg_var)
7200 map[i] = j++;
7201 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7202 if (varmap[i]->is_reg_var)
7203 map[i] = j++;
7204 /* Shuffle varmap according to map. */
7205 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7206 {
7207 while (map[varmap[i]->id] != i)
7208 std::swap (varmap[i], varmap[map[varmap[i]->id]]);
7209 gcc_assert (bitmap_empty_p (varmap[i]->solution));
7210 varmap[i]->id = i;
7211 varmap[i]->next = map[varmap[i]->next];
7212 varmap[i]->head = map[varmap[i]->head];
7213 }
7214 /* Finally rewrite constraints. */
7215 for (unsigned i = 0; i < constraints.length (); ++i)
7216 {
7217 constraints[i]->lhs.var = map[constraints[i]->lhs.var];
7218 constraints[i]->rhs.var = map[constraints[i]->rhs.var];
7219 }
7220 free (map);
7221
7222 if (dump_file)
7223 fprintf (dump_file,
7224 "\nCollapsing static cycles and doing variable "
7225 "substitution\n");
7226
7227 init_graph (varmap.length () * 2);
7228
7229 if (dump_file)
7230 fprintf (dump_file, "Building predecessor graph\n");
7231 build_pred_graph ();
7232
7233 if (dump_file)
7234 fprintf (dump_file, "Detecting pointer and location "
7235 "equivalences\n");
7236 si = perform_var_substitution (graph);
7237
7238 if (dump_file)
7239 fprintf (dump_file, "Rewriting constraints and unifying "
7240 "variables\n");
7241 rewrite_constraints (graph, si);
7242
7243 build_succ_graph ();
7244
7245 free_var_substitution_info (si);
7246
7247 /* Attach complex constraints to graph nodes. */
7248 move_complex_constraints (graph);
7249
7250 if (dump_file)
7251 fprintf (dump_file, "Uniting pointer but not location equivalent "
7252 "variables\n");
7253 unite_pointer_equivalences (graph);
7254
7255 if (dump_file)
7256 fprintf (dump_file, "Finding indirect cycles\n");
7257 find_indirect_cycles (graph);
7258
7259 /* Implicit nodes and predecessors are no longer necessary at this
7260 point. */
7261 remove_preds_and_fake_succs (graph);
7262
7263 if (dump_file && (dump_flags & TDF_GRAPH))
7264 {
7265 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7266 "in dot format:\n");
7267 dump_constraint_graph (dump_file);
7268 fprintf (dump_file, "\n\n");
7269 }
7270
7271 if (dump_file)
7272 fprintf (dump_file, "Solving graph\n");
7273
7274 solve_graph (graph);
7275
7276 if (dump_file && (dump_flags & TDF_GRAPH))
7277 {
7278 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7279 "in dot format:\n");
7280 dump_constraint_graph (dump_file);
7281 fprintf (dump_file, "\n\n");
7282 }
7283 }
7284
7285 /* Create points-to sets for the current function. See the comments
7286 at the start of the file for an algorithmic overview. */
7287
7288 static void
7289 compute_points_to_sets (void)
7290 {
7291 basic_block bb;
7292 varinfo_t vi;
7293
7294 timevar_push (TV_TREE_PTA);
7295
7296 init_alias_vars ();
7297
7298 intra_create_variable_infos (cfun);
7299
7300 /* Now walk all statements and build the constraint set. */
7301 FOR_EACH_BB_FN (bb, cfun)
7302 {
7303 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7304 gsi_next (&gsi))
7305 {
7306 gphi *phi = gsi.phi ();
7307
7308 if (! virtual_operand_p (gimple_phi_result (phi)))
7309 find_func_aliases (cfun, phi);
7310 }
7311
7312 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7313 gsi_next (&gsi))
7314 {
7315 gimple *stmt = gsi_stmt (gsi);
7316
7317 find_func_aliases (cfun, stmt);
7318 }
7319 }
7320
7321 if (dump_file)
7322 {
7323 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7324 dump_constraints (dump_file, 0);
7325 }
7326
7327 /* From the constraints compute the points-to sets. */
7328 solve_constraints ();
7329
7330 /* Post-process solutions for escapes through returns. */
7331 edge_iterator ei;
7332 edge e;
7333 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
7334 if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
7335 {
7336 tree val = gimple_return_retval (ret);
7337 /* ??? Easy to handle simple indirections with some work.
7338 Arbitrary references like foo.bar.baz are more difficult
7339 (but conservatively easy enough with just looking at the base).
7340 Mind to fixup find_func_aliases as well. */
7341 if (!val || !SSA_VAR_P (val))
7342 continue;
7343 /* Returns happen last in non-IPA mode, so they only influence
7344 the ESCAPED solution and we can filter out local variables. */
7345 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
7346 varinfo_t vi = lookup_vi_for_tree (val);
7347 bitmap delta = BITMAP_ALLOC (&pta_obstack);
7348 bitmap_iterator bi;
7349 unsigned i;
7350 for (; vi; vi = vi_next (vi))
7351 {
7352 varinfo_t part_vi = get_varinfo (find (vi->id));
7353 EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
7354 escaped_vi->solution, 0, i, bi)
7355 {
7356 varinfo_t pointed_to_vi = get_varinfo (i);
7357 if (pointed_to_vi->is_global_var
7358 /* We delay marking of heap memory as global. */
7359 || pointed_to_vi->is_heap_var)
7360 bitmap_set_bit (delta, i);
7361 }
7362 }
7363
7364 /* Now compute the transitive closure. */
7365 bitmap_ior_into (escaped_vi->solution, delta);
7366 bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
7367 while (!bitmap_empty_p (delta))
7368 {
7369 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
7370 {
7371 varinfo_t pointed_to_vi = get_varinfo (i);
7372 pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
7373 unsigned j;
7374 bitmap_iterator bi2;
7375 EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
7376 escaped_vi->solution,
7377 0, j, bi2)
7378 {
7379 varinfo_t pointed_to_vi2 = get_varinfo (j);
7380 if (pointed_to_vi2->is_global_var
7381 /* We delay marking of heap memory as global. */
7382 || pointed_to_vi2->is_heap_var)
7383 bitmap_set_bit (new_delta, j);
7384 }
7385 }
7386 bitmap_ior_into (escaped_vi->solution, new_delta);
7387 bitmap_clear (delta);
7388 std::swap (delta, new_delta);
7389 }
7390 BITMAP_FREE (delta);
7391 BITMAP_FREE (new_delta);
7392 }
7393
7394 if (dump_file)
7395 dump_sa_points_to_info (dump_file);
7396
7397 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7398 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7399 get_varinfo (escaped_id));
7400
7401 /* Make sure the ESCAPED solution (which is used as placeholder in
7402 other solutions) does not reference itself. This simplifies
7403 points-to solution queries. */
7404 cfun->gimple_df->escaped.escaped = 0;
7405
7406 /* Compute the points-to sets for pointer SSA_NAMEs. */
7407 unsigned i;
7408 tree ptr;
7409
7410 FOR_EACH_SSA_NAME (i, ptr, cfun)
7411 {
7412 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7413 find_what_p_points_to (cfun->decl, ptr);
7414 }
7415
7416 /* Compute the call-used/clobbered sets. */
7417 FOR_EACH_BB_FN (bb, cfun)
7418 {
7419 gimple_stmt_iterator gsi;
7420
7421 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7422 {
7423 gcall *stmt;
7424 struct pt_solution *pt;
7425
7426 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7427 if (!stmt)
7428 continue;
7429
7430 pt = gimple_call_use_set (stmt);
7431 if (gimple_call_flags (stmt) & ECF_CONST)
7432 memset (pt, 0, sizeof (struct pt_solution));
7433 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7434 {
7435 *pt = find_what_var_points_to (cfun->decl, vi);
7436 /* Escaped (and thus nonlocal) variables are always
7437 implicitly used by calls. */
7438 /* ??? ESCAPED can be empty even though NONLOCAL
7439 has always escaped. */
7440 pt->nonlocal = 1;
7441 pt->escaped = 1;
7442 }
7443 else
7444 {
7445 /* If there is nothing special about this call then
7446 we have made everything that is used also escape. */
7447 *pt = cfun->gimple_df->escaped;
7448 pt->nonlocal = 1;
7449 }
7450
7451 pt = gimple_call_clobber_set (stmt);
7452 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7453 memset (pt, 0, sizeof (struct pt_solution));
7454 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7455 {
7456 *pt = find_what_var_points_to (cfun->decl, vi);
7457 /* Escaped (and thus nonlocal) variables are always
7458 implicitly clobbered by calls. */
7459 /* ??? ESCAPED can be empty even though NONLOCAL
7460 always escapes. */
7461 pt->nonlocal = 1;
7462 pt->escaped = 1;
7463 }
7464 else
7465 {
7466 /* If there is nothing special about this call then
7467 we have made everything that is used also escape. */
7468 *pt = cfun->gimple_df->escaped;
7469 pt->nonlocal = 1;
7470 }
7471 }
7472 }
7473
7474 timevar_pop (TV_TREE_PTA);
7475 }
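/* A sketch of how the sets computed above are consumed later on (CALL
   and DECL are placeholder names, not locals of this file):

     struct pt_solution *pt = gimple_call_use_set (call);
     bool may_read = pt_solution_includes (pt, decl);

   pt_solution_includes is the query exported to the alias oracle in
   tree-ssa-alias.c.  */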
7476
7477
7478 /* Delete created points-to sets. */
7479
7480 static void
7481 delete_points_to_sets (void)
7482 {
7483 unsigned int i;
7484
7485 delete shared_bitmap_table;
7486 shared_bitmap_table = NULL;
7487 if (dump_file && (dump_flags & TDF_STATS))
7488 fprintf (dump_file, "Points to sets created:%d\n",
7489 stats.points_to_sets_created);
7490
7491 delete vi_for_tree;
7492 delete call_stmt_vars;
7493 bitmap_obstack_release (&pta_obstack);
7494 constraints.release ();
7495
7496 for (i = 0; i < graph->size; i++)
7497 graph->complex[i].release ();
7498 free (graph->complex);
7499
7500 free (graph->rep);
7501 free (graph->succs);
7502 free (graph->pe);
7503 free (graph->pe_rep);
7504 free (graph->indirect_cycles);
7505 free (graph);
7506
7507 varmap.release ();
7508 variable_info_pool.release ();
7509 constraint_pool.release ();
7510
7511 obstack_free (&fake_var_decl_obstack, NULL);
7512
7513 delete final_solutions;
7514 obstack_free (&final_solutions_obstack, NULL);
7515 }
7516
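/* Data passed to the visit_loadstore callback below: the dependence
   clique to assign, whether any restrict var escaped, and the set of
   restrict vars (RVARS).  */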
7517 struct vls_data
7518 {
7519 unsigned short clique;
7520 bool escaped_p;
7521 bitmap rvars;
7522 };
7523
7524 /* Mark "other" loads and stores as belonging to CLIQUE and with
7525 base zero. */
7526
7527 static bool
7528 visit_loadstore (gimple *, tree base, tree ref, void *data)
7529 {
7530 unsigned short clique = ((vls_data *) data)->clique;
7531 bitmap rvars = ((vls_data *) data)->rvars;
7532 bool escaped_p = ((vls_data *) data)->escaped_p;
7533 if (TREE_CODE (base) == MEM_REF
7534 || TREE_CODE (base) == TARGET_MEM_REF)
7535 {
7536 tree ptr = TREE_OPERAND (base, 0);
7537 if (TREE_CODE (ptr) == SSA_NAME)
7538 {
7539 /* For parameters, get at the points-to set for the actual parm
7540 decl. */
7541 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7542 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7543 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7544 ptr = SSA_NAME_VAR (ptr);
7545
7546 /* We need to make sure the points-to set of 'ptr' doesn't
7547 include any of the restrict tags we added bases for. */
7548 varinfo_t vi = lookup_vi_for_tree (ptr);
7549 if (! vi)
7550 return false;
7551
7552 vi = get_varinfo (find (vi->id));
7553 if (bitmap_intersect_p (rvars, vi->solution)
7554 || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
7555 return false;
7556 }
7557
7558 /* Do not overwrite existing cliques (that includes clique, base
7559 pairs we just set). */
7560 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7561 {
7562 MR_DEPENDENCE_CLIQUE (base) = clique;
7563 MR_DEPENDENCE_BASE (base) = 0;
7564 }
7565 }
7566
7567 /* For plain decl accesses see whether they are accesses to globals
7568 and rewrite them to MEM_REFs with { clique, 0 }. */
7569 if (VAR_P (base)
7570 && is_global_var (base)
7571 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7572 ops callback. */
7573 && base != ref)
7574 {
7575 tree *basep = &ref;
7576 while (handled_component_p (*basep))
7577 basep = &TREE_OPERAND (*basep, 0);
7578 gcc_assert (VAR_P (*basep));
7579 tree ptr = build_fold_addr_expr (*basep);
7580 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7581 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7582 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7583 MR_DEPENDENCE_BASE (*basep) = 0;
7584 }
7585
7586 return false;
7587 }
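/* For illustration (a hypothetical input, not taken from a testcase),
   a store to a global like

     int g;
     void f (void) { g = 1; }

   is rewritten above into a MEM_REF of &g carrying dependence info
   { clique, 0 }, so it is disambiguated against restrict-based accesses
   with a nonzero base but not against other { clique, 0 } accesses.  */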
7588
7589 struct msdi_data {
7590 tree ptr;
7591 unsigned short *clique;
7592 unsigned short *last_ruid;
7593 varinfo_t restrict_var;
7594 };
7595
7596 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7597 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7598 Return whether dependence info was assigned to BASE. */
7599
7600 static bool
7601 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7602 {
7603 tree ptr = ((msdi_data *)data)->ptr;
7604 unsigned short &clique = *((msdi_data *)data)->clique;
7605 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7606 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7607 if ((TREE_CODE (base) == MEM_REF
7608 || TREE_CODE (base) == TARGET_MEM_REF)
7609 && TREE_OPERAND (base, 0) == ptr)
7610 {
7611 /* Do not overwrite existing cliques. This avoids overwriting
7612 dependence info from a function with restrict parameters that
7613 was inlined into another function with restrict parameters.
7614 This usually means we prefer to be precise in innermost loops. */
7615 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7616 {
7617 if (clique == 0)
7618 {
7619 if (cfun->last_clique == 0)
7620 cfun->last_clique = 1;
7621 clique = 1;
7622 }
7623 if (restrict_var->ruid == 0)
7624 restrict_var->ruid = ++last_ruid;
7625 MR_DEPENDENCE_CLIQUE (base) = clique;
7626 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7627 return true;
7628 }
7629 }
7630 return false;
7631 }
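/* A sketch of the effect of the above (the exact uid values depend on
   processing order):

     void f (int * restrict p, int * restrict q)
     {
       *p = *q;
     }

   Both dereferences end up in the same clique, with the restrict tags
   of p and q getting distinct ruids, say base 1 and base 2, which lets
   the store and the load be disambiguated.  */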
7632
7633 /* Clear dependence info for the clique DATA. */
7634
7635 static bool
7636 clear_dependence_clique (gimple *, tree base, tree, void *data)
7637 {
7638 unsigned short clique = (uintptr_t)data;
7639 if ((TREE_CODE (base) == MEM_REF
7640 || TREE_CODE (base) == TARGET_MEM_REF)
7641 && MR_DEPENDENCE_CLIQUE (base) == clique)
7642 {
7643 MR_DEPENDENCE_CLIQUE (base) = 0;
7644 MR_DEPENDENCE_BASE (base) = 0;
7645 }
7646
7647 return false;
7648 }
7649
7650 /* Compute the set of independent memory references based on restrict
7651 tags and their conservative propagation to the points-to sets. */
7652
7653 static void
7654 compute_dependence_clique (void)
7655 {
7656 /* First clear the special "local" clique. */
7657 basic_block bb;
7658 if (cfun->last_clique != 0)
7659 FOR_EACH_BB_FN (bb, cfun)
7660 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7661 !gsi_end_p (gsi); gsi_next (&gsi))
7662 {
7663 gimple *stmt = gsi_stmt (gsi);
7664 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
7665 clear_dependence_clique,
7666 clear_dependence_clique);
7667 }
7668
7669 unsigned short clique = 0;
7670 unsigned short last_ruid = 0;
7671 bitmap rvars = BITMAP_ALLOC (NULL);
7672 bool escaped_p = false;
7673 for (unsigned i = 0; i < num_ssa_names; ++i)
7674 {
7675 tree ptr = ssa_name (i);
7676 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7677 continue;
7678
7679 /* Avoid all this when ptr is not dereferenced? */
7680 tree p = ptr;
7681 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7682 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7683 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7684 p = SSA_NAME_VAR (ptr);
7685 varinfo_t vi = lookup_vi_for_tree (p);
7686 if (!vi)
7687 continue;
7688 vi = get_varinfo (find (vi->id));
7689 bitmap_iterator bi;
7690 unsigned j;
7691 varinfo_t restrict_var = NULL;
7692 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7693 {
7694 varinfo_t oi = get_varinfo (j);
7695 if (oi->head != j)
7696 oi = get_varinfo (oi->head);
7697 if (oi->is_restrict_var)
7698 {
7699 if (restrict_var
7700 && restrict_var != oi)
7701 {
7702 if (dump_file && (dump_flags & TDF_DETAILS))
7703 {
7704 fprintf (dump_file, "found restrict pointed-to "
7705 "for ");
7706 print_generic_expr (dump_file, ptr);
7707 fprintf (dump_file, " but not exclusively\n");
7708 }
7709 restrict_var = NULL;
7710 break;
7711 }
7712 restrict_var = oi;
7713 }
7714 /* NULL is the only other valid points-to entry. */
7715 else if (oi->id != nothing_id)
7716 {
7717 restrict_var = NULL;
7718 break;
7719 }
7720 }
7721 /* Ok, found that ptr must(!) point to a single(!) restrict
7722 variable. */
7723 /* ??? PTA isn't really a proper propagation engine to compute
7724 this property.
7725 ??? We could handle merging of two restricts by unifying them. */
7726 if (restrict_var)
7727 {
7728 /* Now look at possible dereferences of ptr. */
7729 imm_use_iterator ui;
7730 gimple *use_stmt;
7731 bool used = false;
7732 msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
7733 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7734 used |= walk_stmt_load_store_ops (use_stmt, &data,
7735 maybe_set_dependence_info,
7736 maybe_set_dependence_info);
7737 if (used)
7738 {
7739 /* Add all subvars to the restrict pointed-to set. */
7740 for (unsigned sv = restrict_var->head; sv != 0;
7741 sv = get_varinfo (sv)->next)
7742 bitmap_set_bit (rvars, sv);
7743 varinfo_t escaped = get_varinfo (find (escaped_id));
7744 if (bitmap_bit_p (escaped->solution, restrict_var->id))
7745 escaped_p = true;
7746 }
7747 }
7748 }
7749
7750 if (clique != 0)
7751 {
7752 /* Assign the BASE id zero to all accesses not based on a restrict
7753 pointer. That way they get disambiguated against restrict
7754 accesses but not against each other. */
7755 /* ??? For restricts derived from globals (thus not incoming
7756 parameters) we can't restrict scoping properly, so the following
7757 is too aggressive there. For now we have excluded those globals from
7758 getting into the MR_DEPENDENCE machinery. */
7759 vls_data data = { clique, escaped_p, rvars };
7760 basic_block bb;
7761 FOR_EACH_BB_FN (bb, cfun)
7762 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7763 !gsi_end_p (gsi); gsi_next (&gsi))
7764 {
7765 gimple *stmt = gsi_stmt (gsi);
7766 walk_stmt_load_store_ops (stmt, &data,
7767 visit_loadstore, visit_loadstore);
7768 }
7769 }
7770
7771 BITMAP_FREE (rvars);
7772 }
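/* The analysis above deliberately gives up unless PTR must point to a
   single restrict variable.  A sketch of a case that gets no dependence
   info:

     void f (int * restrict p, int * restrict q, int cond)
     {
       int *r = cond ? p : q;
       *r = 0;
     }

   The points-to set of r contains two different restrict tags, so
   restrict_var is reset to NULL and the store through r keeps
   clique 0.  */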
7773
7774 /* Compute points-to information for every SSA_NAME pointer in the
7775 current function and compute the transitive closure of escaped
7776 variables to re-initialize the call-clobber states of local variables. */
7777
7778 unsigned int
7779 compute_may_aliases (void)
7780 {
7781 if (cfun->gimple_df->ipa_pta)
7782 {
7783 if (dump_file)
7784 {
7785 fprintf (dump_file, "\nNot re-computing points-to information "
7786 "because IPA points-to information is available.\n\n");
7787
7788 /* But still dump what we have remaining. */
7789 dump_alias_info (dump_file);
7790 }
7791
7792 return 0;
7793 }
7794
7795 /* For each pointer P_i, determine the sets of variables that P_i may
7796 point to. Compute the reachability set of escaped and call-used
7797 variables. */
7798 compute_points_to_sets ();
7799
7800 /* Debugging dumps. */
7801 if (dump_file)
7802 dump_alias_info (dump_file);
7803
7804 /* Compute restrict-based memory disambiguations. */
7805 compute_dependence_clique ();
7806
7807 /* Deallocate memory used by aliasing data structures and the internal
7808 points-to solution. */
7809 delete_points_to_sets ();
7810
7811 gcc_assert (!need_ssa_update_p (cfun));
7812
7813 return 0;
7814 }
7815
7816 /* A dummy pass to cause points-to information to be computed via
7817 TODO_rebuild_alias. */
7818
7819 namespace {
7820
7821 const pass_data pass_data_build_alias =
7822 {
7823 GIMPLE_PASS, /* type */
7824 "alias", /* name */
7825 OPTGROUP_NONE, /* optinfo_flags */
7826 TV_NONE, /* tv_id */
7827 ( PROP_cfg | PROP_ssa ), /* properties_required */
7828 0, /* properties_provided */
7829 0, /* properties_destroyed */
7830 0, /* todo_flags_start */
7831 TODO_rebuild_alias, /* todo_flags_finish */
7832 };
7833
7834 class pass_build_alias : public gimple_opt_pass
7835 {
7836 public:
7837 pass_build_alias (gcc::context *ctxt)
7838 : gimple_opt_pass (pass_data_build_alias, ctxt)
7839 {}
7840
7841 /* opt_pass methods: */
7842 virtual bool gate (function *) { return flag_tree_pta; }
7843
7844 }; // class pass_build_alias
7845
7846 } // anon namespace
7847
7848 gimple_opt_pass *
7849 make_pass_build_alias (gcc::context *ctxt)
7850 {
7851 return new pass_build_alias (ctxt);
7852 }
7853
7854 /* A dummy pass to cause points-to information to be computed via
7855 TODO_rebuild_alias. */
7856
7857 namespace {
7858
7859 const pass_data pass_data_build_ealias =
7860 {
7861 GIMPLE_PASS, /* type */
7862 "ealias", /* name */
7863 OPTGROUP_NONE, /* optinfo_flags */
7864 TV_NONE, /* tv_id */
7865 ( PROP_cfg | PROP_ssa ), /* properties_required */
7866 0, /* properties_provided */
7867 0, /* properties_destroyed */
7868 0, /* todo_flags_start */
7869 TODO_rebuild_alias, /* todo_flags_finish */
7870 };
7871
7872 class pass_build_ealias : public gimple_opt_pass
7873 {
7874 public:
7875 pass_build_ealias (gcc::context *ctxt)
7876 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7877 {}
7878
7879 /* opt_pass methods: */
7880 virtual bool gate (function *) { return flag_tree_pta; }
7881
7882 }; // class pass_build_ealias
7883
7884 } // anon namespace
7885
7886 gimple_opt_pass *
7887 make_pass_build_ealias (gcc::context *ctxt)
7888 {
7889 return new pass_build_ealias (ctxt);
7890 }
7891
7892
7893 /* IPA PTA solutions for ESCAPED. */
7894 struct pt_solution ipa_escaped_pt
7895 = { true, false, false, false, false,
7896 false, false, false, false, false, NULL };
7897
7898 /* Associate node with varinfo DATA. Worker for
7899 cgraph_for_symbol_thunks_and_aliases. */
7900 static bool
7901 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7902 {
7903 if ((node->alias
7904 || (node->thunk.thunk_p
7905 && ! node->global.inlined_to))
7906 && node->analyzed
7907 && !node->ifunc_resolver)
7908 insert_vi_for_tree (node->decl, (varinfo_t)data);
7909 return false;
7910 }
7911
7912 /* Dump varinfo VI to FILE. */
7913
7914 static void
7915 dump_varinfo (FILE *file, varinfo_t vi)
7916 {
7917 if (vi == NULL)
7918 return;
7919
7920 fprintf (file, "%u: %s\n", vi->id, vi->name);
7921
7922 const char *sep = " ";
7923 if (vi->is_artificial_var)
7924 fprintf (file, "%sartificial", sep);
7925 if (vi->is_special_var)
7926 fprintf (file, "%sspecial", sep);
7927 if (vi->is_unknown_size_var)
7928 fprintf (file, "%sunknown-size", sep);
7929 if (vi->is_full_var)
7930 fprintf (file, "%sfull", sep);
7931 if (vi->is_heap_var)
7932 fprintf (file, "%sheap", sep);
7933 if (vi->may_have_pointers)
7934 fprintf (file, "%smay-have-pointers", sep);
7935 if (vi->only_restrict_pointers)
7936 fprintf (file, "%sonly-restrict-pointers", sep);
7937 if (vi->is_restrict_var)
7938 fprintf (file, "%sis-restrict-var", sep);
7939 if (vi->is_global_var)
7940 fprintf (file, "%sglobal", sep);
7941 if (vi->is_ipa_escape_point)
7942 fprintf (file, "%sipa-escape-point", sep);
7943 if (vi->is_fn_info)
7944 fprintf (file, "%sfn-info", sep);
7945 if (vi->ruid)
7946 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7947 if (vi->next)
7948 fprintf (file, "%snext:%u", sep, vi->next);
7949 if (vi->head != vi->id)
7950 fprintf (file, "%shead:%u", sep, vi->head);
7951 if (vi->offset)
7952 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7953 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7954 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7955 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7956 && vi->fullsize != vi->size)
7957 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7958 vi->fullsize);
7959 fprintf (file, "\n");
7960
7961 if (vi->solution && !bitmap_empty_p (vi->solution))
7962 {
7963 bitmap_iterator bi;
7964 unsigned i;
7965 fprintf (file, " solution: {");
7966 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7967 fprintf (file, " %u", i);
7968 fprintf (file, " }\n");
7969 }
7970
7971 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7972 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7973 {
7974 bitmap_iterator bi;
7975 unsigned i;
7976 fprintf (file, " oldsolution: {");
7977 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7978 fprintf (file, " %u", i);
7979 fprintf (file, " }\n");
7980 }
7981 }
7982
7983 /* Dump varinfo VI to stderr. */
7984
7985 DEBUG_FUNCTION void
7986 debug_varinfo (varinfo_t vi)
7987 {
7988 dump_varinfo (stderr, vi);
7989 }
7990
7991 /* Dump varmap to FILE. */
7992
7993 static void
7994 dump_varmap (FILE *file)
7995 {
7996 if (varmap.length () == 0)
7997 return;
7998
7999 fprintf (file, "variables:\n");
8000
8001 for (unsigned int i = 0; i < varmap.length (); ++i)
8002 {
8003 varinfo_t vi = get_varinfo (i);
8004 dump_varinfo (file, vi);
8005 }
8006
8007 fprintf (file, "\n");
8008 }
8009
8010 /* Dump varmap to stderr. */
8011
8012 DEBUG_FUNCTION void
8013 debug_varmap (void)
8014 {
8015 dump_varmap (stderr);
8016 }
8017
8018 /* Compute whether node is referred to non-locally. Worker for
8019 cgraph_for_symbol_thunks_and_aliases. */
8020 static bool
8021 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8022 {
8023 bool *nonlocal_p = (bool *)data;
8024 *nonlocal_p |= (node->used_from_other_partition
8025 || node->externally_visible
8026 || node->force_output
8027 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8028 return false;
8029 }
8030
8031 /* Same for varpool nodes. */
8032 static bool
8033 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8034 {
8035 bool *nonlocal_p = (bool *)data;
8036 *nonlocal_p |= (node->used_from_other_partition
8037 || node->externally_visible
8038 || node->force_output);
8039 return false;
8040 }
8041
8042 /* Execute the driver for IPA PTA. */
8043 static unsigned int
8044 ipa_pta_execute (void)
8045 {
8046 struct cgraph_node *node;
8047 varpool_node *var;
8048 unsigned int from = 0;
8049
8050 in_ipa_mode = 1;
8051
8052 init_alias_vars ();
8053
8054 if (dump_file && (dump_flags & TDF_DETAILS))
8055 {
8056 symtab->dump (dump_file);
8057 fprintf (dump_file, "\n");
8058 }
8059
8060 if (dump_file)
8061 {
8062 fprintf (dump_file, "Generating generic constraints\n\n");
8063 dump_constraints (dump_file, from);
8064 fprintf (dump_file, "\n");
8065 from = constraints.length ();
8066 }
8067
8068 /* Build the constraints. */
8069 FOR_EACH_DEFINED_FUNCTION (node)
8070 {
8071 varinfo_t vi;
8072 /* Nodes without a body are not interesting. Especially do not
8073 visit clones at this point for now - we get duplicate decls
8074 there for inline clones at least. */
8075 if (!node->has_gimple_body_p () || node->global.inlined_to)
8076 continue;
8077 node->get_body ();
8078
8079 gcc_assert (!node->clone_of);
8080
8081 /* For externally visible or attribute used annotated functions use
8082 local constraints for their arguments.
8083 For local functions we see all callers and thus do not need initial
8084 constraints for parameters. */
8085 bool nonlocal_p = (node->used_from_other_partition
8086 || node->externally_visible
8087 || node->force_output
8088 || lookup_attribute ("noipa",
8089 DECL_ATTRIBUTES (node->decl)));
8090 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8091 &nonlocal_p, true);
8092
8093 vi = create_function_info_for (node->decl,
8094 alias_get_name (node->decl), false,
8095 nonlocal_p);
8096 if (dump_file
8097 && from != constraints.length ())
8098 {
8099 fprintf (dump_file,
8100 "Generating initial constraints for %s", node->name ());
8101 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8102 fprintf (dump_file, " (%s)",
8103 IDENTIFIER_POINTER
8104 (DECL_ASSEMBLER_NAME (node->decl)));
8105 fprintf (dump_file, "\n\n");
8106 dump_constraints (dump_file, from);
8107 fprintf (dump_file, "\n");
8108
8109 from = constraints.length ();
8110 }
8111
8112 node->call_for_symbol_thunks_and_aliases
8113 (associate_varinfo_to_alias, vi, true);
8114 }
8115
8116 /* Create constraints for global variables and their initializers. */
8117 FOR_EACH_VARIABLE (var)
8118 {
8119 if (var->alias && var->analyzed)
8120 continue;
8121
8122 varinfo_t vi = get_vi_for_tree (var->decl);
8123
8124 /* For the purpose of IPA PTA, unit-local globals are not
8125 escape points. */
8126 bool nonlocal_p = (var->used_from_other_partition
8127 || var->externally_visible
8128 || var->force_output);
8129 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8130 &nonlocal_p, true);
8131 if (nonlocal_p)
8132 vi->is_ipa_escape_point = true;
8133 }
8134
8135 if (dump_file
8136 && from != constraints.length ())
8137 {
8138 fprintf (dump_file,
8139 "Generating constraints for global initializers\n\n");
8140 dump_constraints (dump_file, from);
8141 fprintf (dump_file, "\n");
8142 from = constraints.length ();
8143 }
8144
8145 FOR_EACH_DEFINED_FUNCTION (node)
8146 {
8147 struct function *func;
8148 basic_block bb;
8149
8150 /* Nodes without a body are not interesting. */
8151 if (!node->has_gimple_body_p () || node->clone_of)
8152 continue;
8153
8154 if (dump_file)
8155 {
8156 fprintf (dump_file,
8157 "Generating constraints for %s", node->name ());
8158 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8159 fprintf (dump_file, " (%s)",
8160 IDENTIFIER_POINTER
8161 (DECL_ASSEMBLER_NAME (node->decl)));
8162 fprintf (dump_file, "\n");
8163 }
8164
8165 func = DECL_STRUCT_FUNCTION (node->decl);
8166 gcc_assert (cfun == NULL);
8167
8168 /* Build constraints for the function body. */
8169 FOR_EACH_BB_FN (bb, func)
8170 {
8171 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8172 gsi_next (&gsi))
8173 {
8174 gphi *phi = gsi.phi ();
8175
8176 if (! virtual_operand_p (gimple_phi_result (phi)))
8177 find_func_aliases (func, phi);
8178 }
8179
8180 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8181 gsi_next (&gsi))
8182 {
8183 gimple *stmt = gsi_stmt (gsi);
8184
8185 find_func_aliases (func, stmt);
8186 find_func_clobbers (func, stmt);
8187 }
8188 }
8189
8190 if (dump_file)
8191 {
8192 fprintf (dump_file, "\n");
8193 dump_constraints (dump_file, from);
8194 fprintf (dump_file, "\n");
8195 from = constraints.length ();
8196 }
8197 }
8198
8199 /* From the constraints compute the points-to sets. */
8200 solve_constraints ();
8201
8202 if (dump_file)
8203 dump_sa_points_to_info (dump_file);
8204
8205 /* Now post-process solutions to handle locals from different
8206 runtime instantiations coming in through recursive invocations. */
8207 unsigned shadow_var_cnt = 0;
8208 for (unsigned i = 1; i < varmap.length (); ++i)
8209 {
8210 varinfo_t fi = get_varinfo (i);
8211 if (fi->is_fn_info
8212 && fi->decl)
8213 /* Automatic variables pointed to by their containing function's
8214 parameters need this treatment. */
8215 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8216 ai; ai = vi_next (ai))
8217 {
8218 varinfo_t vi = get_varinfo (find (ai->id));
8219 bitmap_iterator bi;
8220 unsigned j;
8221 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8222 {
8223 varinfo_t pt = get_varinfo (j);
8224 if (pt->shadow_var_uid == 0
8225 && pt->decl
8226 && auto_var_in_fn_p (pt->decl, fi->decl))
8227 {
8228 pt->shadow_var_uid = allocate_decl_uid ();
8229 shadow_var_cnt++;
8230 }
8231 }
8232 }
8233 /* As well as global variables, which are another way of passing
8234 arguments to recursive invocations. */
8235 else if (fi->is_global_var)
8236 {
8237 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8238 {
8239 varinfo_t vi = get_varinfo (find (ai->id));
8240 bitmap_iterator bi;
8241 unsigned j;
8242 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8243 {
8244 varinfo_t pt = get_varinfo (j);
8245 if (pt->shadow_var_uid == 0
8246 && pt->decl
8247 && auto_var_p (pt->decl))
8248 {
8249 pt->shadow_var_uid = allocate_decl_uid ();
8250 shadow_var_cnt++;
8251 }
8252 }
8253 }
8254 }
8255 }
8256 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8257 fprintf (dump_file, "Allocated %u shadow variables for locals "
8258 "that may leak into recursive invocations of their containing "
8259 "functions\n", shadow_var_cnt);
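/* A sketch of the recursion issue handled above:

     void rec (int *p, int n)
     {
       int local;
       if (p)
         *p = n;
       if (n)
         rec (&local, n - 1);
     }

   LOCAL of one runtime instantiation escapes into the next one through
   the parameter, so treating all instantiations as a single variable
   for disambiguation would be wrong; the shadow uid stands in for the
   other instantiations.  */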
8260
8261 /* Compute the global points-to sets for ESCAPED.
8262 ??? Note that the computed escape set is not correct
8263 for the whole unit as we fail to consider graph edges to
8264 externally visible functions. */
8265 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8266
8267 /* Make sure the ESCAPED solution (which is used as placeholder in
8268 other solutions) does not reference itself. This simplifies
8269 points-to solution queries. */
8270 ipa_escaped_pt.ipa_escaped = 0;
8271
8272 /* Assign the points-to sets to the SSA names in the unit. */
8273 FOR_EACH_DEFINED_FUNCTION (node)
8274 {
8275 tree ptr;
8276 struct function *fn;
8277 unsigned i;
8278 basic_block bb;
8279
8280 /* Nodes without a body are not interesting. */
8281 if (!node->has_gimple_body_p () || node->clone_of)
8282 continue;
8283
8284 fn = DECL_STRUCT_FUNCTION (node->decl);
8285
8286 /* Compute the points-to sets for pointer SSA_NAMEs. */
8287 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8288 {
8289 if (ptr
8290 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8291 find_what_p_points_to (node->decl, ptr);
8292 }
8293
8294 /* Compute the call-use and call-clobber sets for indirect calls
8295 and calls to external functions. */
8296 FOR_EACH_BB_FN (bb, fn)
8297 {
8298 gimple_stmt_iterator gsi;
8299
8300 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8301 {
8302 gcall *stmt;
8303 struct pt_solution *pt;
8304 varinfo_t vi, fi;
8305 tree decl;
8306
8307 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8308 if (!stmt)
8309 continue;
8310
8311 /* Handle direct calls to functions with body. */
8312 decl = gimple_call_fndecl (stmt);
8313
8314 {
8315 tree called_decl = NULL_TREE;
8316 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8317 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8318 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8319 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8320
8321 if (called_decl != NULL_TREE
8322 && !fndecl_maybe_in_other_partition (called_decl))
8323 decl = called_decl;
8324 }
8325
8326 if (decl
8327 && (fi = lookup_vi_for_tree (decl))
8328 && fi->is_fn_info)
8329 {
8330 *gimple_call_clobber_set (stmt)
8331 = find_what_var_points_to
8332 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8333 *gimple_call_use_set (stmt)
8334 = find_what_var_points_to
8335 (node->decl, first_vi_for_offset (fi, fi_uses));
8336 }
8337 /* Handle direct calls to external functions. */
8338 else if (decl && (!fi || fi->decl))
8339 {
8340 pt = gimple_call_use_set (stmt);
8341 if (gimple_call_flags (stmt) & ECF_CONST)
8342 memset (pt, 0, sizeof (struct pt_solution));
8343 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8344 {
8345 *pt = find_what_var_points_to (node->decl, vi);
8346 /* Escaped (and thus nonlocal) variables are always
8347 implicitly used by calls. */
8348 /* ??? ESCAPED can be empty even though NONLOCAL
8349 always escapes. */
8350 pt->nonlocal = 1;
8351 pt->ipa_escaped = 1;
8352 }
8353 else
8354 {
8355 /* If there is nothing special about this call then
8356 we have made everything that is used also escape. */
8357 *pt = ipa_escaped_pt;
8358 pt->nonlocal = 1;
8359 }
8360
8361 pt = gimple_call_clobber_set (stmt);
8362 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8363 memset (pt, 0, sizeof (struct pt_solution));
8364 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8365 {
8366 *pt = find_what_var_points_to (node->decl, vi);
8367 /* Escaped (and thus nonlocal) variables are always
8368 implicitly clobbered by calls. */
8369 /* ??? ESCAPED can be empty even though NONLOCAL
8370 always escapes. */
8371 pt->nonlocal = 1;
8372 pt->ipa_escaped = 1;
8373 }
8374 else
8375 {
8376 /* If there is nothing special about this call then
8377 we have made everything that is used also escape. */
8378 *pt = ipa_escaped_pt;
8379 pt->nonlocal = 1;
8380 }
8381 }
8382 /* Handle indirect calls. */
8383 else if ((fi = get_fi_for_callee (stmt)))
8384 {
8385 /* We need to accumulate all clobbers/uses of all possible
8386 callees. */
8387 fi = get_varinfo (find (fi->id));
8388 /* If we cannot constrain the set of functions we'll end up
8389 calling, we end up using/clobbering everything. */
8390 if (bitmap_bit_p (fi->solution, anything_id)
8391 || bitmap_bit_p (fi->solution, nonlocal_id)
8392 || bitmap_bit_p (fi->solution, escaped_id))
8393 {
8394 pt_solution_reset (gimple_call_clobber_set (stmt));
8395 pt_solution_reset (gimple_call_use_set (stmt));
8396 }
8397 else
8398 {
8399 bitmap_iterator bi;
8400 unsigned i;
8401 struct pt_solution *uses, *clobbers;
8402
8403 uses = gimple_call_use_set (stmt);
8404 clobbers = gimple_call_clobber_set (stmt);
8405 memset (uses, 0, sizeof (struct pt_solution));
8406 memset (clobbers, 0, sizeof (struct pt_solution));
8407 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8408 {
8409 struct pt_solution sol;
8410
8411 vi = get_varinfo (i);
8412 if (!vi->is_fn_info)
8413 {
8414 /* ??? We could be more precise here? */
8415 uses->nonlocal = 1;
8416 uses->ipa_escaped = 1;
8417 clobbers->nonlocal = 1;
8418 clobbers->ipa_escaped = 1;
8419 continue;
8420 }
8421
8422 if (!uses->anything)
8423 {
8424 sol = find_what_var_points_to
8425 (node->decl,
8426 first_vi_for_offset (vi, fi_uses));
8427 pt_solution_ior_into (uses, &sol);
8428 }
8429 if (!clobbers->anything)
8430 {
8431 sol = find_what_var_points_to
8432 (node->decl,
8433 first_vi_for_offset (vi, fi_clobbers));
8434 pt_solution_ior_into (clobbers, &sol);
8435 }
8436 }
8437 }
8438 }
8439 else
8440 gcc_unreachable ();
8441 }
8442 }
8443
8444 fn->gimple_df->ipa_pta = true;
8445
8446 /* We have to re-set the final-solution cache after each function
8447 because what is a "global" is dependent on function context. */
8448 final_solutions->empty ();
8449 obstack_free (&final_solutions_obstack, NULL);
8450 gcc_obstack_init (&final_solutions_obstack);
8451 }
8452
8453 delete_points_to_sets ();
8454
8455 in_ipa_mode = 0;
8456
8457 return 0;
8458 }
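/* The driver above is gated by -fipa-pta (see the pass gate below); a
   typical invocation to inspect its work would be

     gcc -O2 -fipa-pta -fdump-ipa-pta foo.c

   where the ipa pta dump shows the generated constraints and the
   solved points-to sets.  */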
8459
8460 namespace {
8461
8462 const pass_data pass_data_ipa_pta =
8463 {
8464 SIMPLE_IPA_PASS, /* type */
8465 "pta", /* name */
8466 OPTGROUP_NONE, /* optinfo_flags */
8467 TV_IPA_PTA, /* tv_id */
8468 0, /* properties_required */
8469 0, /* properties_provided */
8470 0, /* properties_destroyed */
8471 0, /* todo_flags_start */
8472 0, /* todo_flags_finish */
8473 };
8474
8475 class pass_ipa_pta : public simple_ipa_opt_pass
8476 {
8477 public:
8478 pass_ipa_pta (gcc::context *ctxt)
8479 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8480 {}
8481
8482 /* opt_pass methods: */
8483 virtual bool gate (function *)
8484 {
8485 return (optimize
8486 && flag_ipa_pta
8487 /* Don't bother doing anything if the program has errors. */
8488 && !seen_error ());
8489 }
8490
8491 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8492
8493 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8494
8495 }; // class pass_ipa_pta
8496
8497 } // anon namespace
8498
8499 simple_ipa_opt_pass *
8500 make_pass_ipa_pta (gcc::context *ctxt)
8501 {
8502 return new pass_ipa_pta (ctxt);
8503 }