1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2017 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement, because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
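
/* As a purely illustrative sketch (not an excerpt from any test case), given

     struct S { int i; float f; } s;
     s.i = 123;
     s.f = 3.5f;
     use_int (s.i);

   where use_int is a hypothetical consumer of the value, intraprocedural
   SRA can replace the accesses to s.i and s.f with independent scalar
   variables (given "fancy" names such as s$i and s$f, see make_fancy_name
   below), after which the aggregate s itself often becomes dead and can be
   removed.  */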
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-fnsummary.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
107
108 /* Enumeration of all aggregate reductions we can do. */
109 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
110 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
111 SRA_MODE_INTRA }; /* late intraprocedural SRA */
112
113 /* Global variable describing which aggregate reduction we are performing at
114 the moment. */
115 static enum sra_mode sra_mode;
116
117 struct assign_link;
118
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120 part). It can also represent a group of accesses that refer to exactly the
121 same fragment of an aggregate (i.e. those that have exactly the same offset
122 and size). Such representatives for a single aggregate, once determined,
123 are linked in a linked list and have the group fields set.
124
125 Moreover, when doing intraprocedural SRA, a tree is built from those
126 representatives (by the means of first_child and next_sibling pointers), in
127 which all items in a subtree are "within" the root, i.e. their offset is
128 greater or equal to offset of the root and offset+size is smaller or equal
129 to offset+size of the root. Children of an access are sorted by offset.
130
131 Note that accesses to parts of vector and complex number types are always
132 represented by an access to the whole complex number or vector. It is the
133 duty of the modifying functions to replace them appropriately. */
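
/* A purely illustrative example of the tree described above: assuming
   32-bit ints, for

     struct In { int a; int b; };
     struct Out { struct In in; int c; } x;

   representative accesses to x.in (offset 0, size 64), x.in.a (offset 0,
   size 32), x.in.b (offset 32, size 32) and x.c (offset 64, size 32) would
   form two trees rooted at x.in and x.c, with x.in.a and x.in.b being the
   children of x.in.  */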
134
135 struct access
136 {
137 /* Values returned by `get_ref_base_and_extent' for each component reference.
138 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
139 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
140 HOST_WIDE_INT offset;
141 HOST_WIDE_INT size;
142 tree base;
143
144 /* Expression. It is context dependent so do not use it to create new
145 expressions to access the original aggregate. See PR 42154 for a
146 testcase. */
147 tree expr;
148 /* Type. */
149 tree type;
150
151 /* The statement this access belongs to. */
152 gimple *stmt;
153
154 /* Next group representative for this aggregate. */
155 struct access *next_grp;
156
157 /* Pointer to the group representative. Pointer to itself if the struct is
158 the representative. */
159 struct access *group_representative;
160
161 /* After access tree has been constructed, this points to the parent of the
162 current access, if there is one. NULL for roots. */
163 struct access *parent;
164
165 /* If this access has any children (in terms of the definition above), this
166 points to the first one. */
167 struct access *first_child;
168
169 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
170 described above. In IPA-SRA this is a pointer to the next access
171 belonging to the same group (having the same representative). */
172 struct access *next_sibling;
173
174 /* Pointers to the first and last element in the linked list of assign
175 links. */
176 struct assign_link *first_link, *last_link;
177
178 /* Pointer to the next access in the work queue. */
179 struct access *next_queued;
180
181 /* Replacement variable for this access "region." Never to be accessed
182 directly, always only by the means of get_access_replacement() and only
183 when grp_to_be_replaced flag is set. */
184 tree replacement_decl;
185
186 /* Is this access an access to a non-addressable field? */
187 unsigned non_addressable : 1;
188
189 /* Is this access made in reverse storage order? */
190 unsigned reverse : 1;
191
192 /* Is this particular access write access? */
193 unsigned write : 1;
194
195 /* Is this access currently in the work queue? */
196 unsigned grp_queued : 1;
197
198 /* Does this group contain a write access? This flag is propagated down the
199 access tree. */
200 unsigned grp_write : 1;
201
202 /* Does this group contain a read access? This flag is propagated down the
203 access tree. */
204 unsigned grp_read : 1;
205
206 /* Does this group contain a read access that comes from an assignment
207 statement? This flag is propagated down the access tree. */
208 unsigned grp_assignment_read : 1;
209
210 /* Does this group contain a write access that comes from an assignment
211 statement? This flag is propagated down the access tree. */
212 unsigned grp_assignment_write : 1;
213
214 /* Does this group contain a read access through a scalar type? This flag is
215 not propagated in the access tree in any direction. */
216 unsigned grp_scalar_read : 1;
217
218 /* Does this group contain a write access through a scalar type? This flag
219 is not propagated in the access tree in any direction. */
220 unsigned grp_scalar_write : 1;
221
222 /* Is this access an artificial one created to scalarize some record
223 entirely? */
224 unsigned grp_total_scalarization : 1;
225
226 /* Other passes of the analysis use this bit to make function
227 analyze_access_subtree create scalar replacements for this group if
228 possible. */
229 unsigned grp_hint : 1;
230
231 /* Is the subtree rooted in this access fully covered by scalar
232 replacements? */
233 unsigned grp_covered : 1;
234
235 /* If set to true, this access and all below it in an access tree must not be
236 scalarized. */
237 unsigned grp_unscalarizable_region : 1;
238
239 /* Whether data have been written to parts of the aggregate covered by this
240 access which is not to be scalarized. This flag is propagated up in the
241 access tree. */
242 unsigned grp_unscalarized_data : 1;
243
244 /* Does this access and/or group contain a write access through a
245 BIT_FIELD_REF? */
246 unsigned grp_partial_lhs : 1;
247
248 /* Set when a scalar replacement should be created for this variable. */
249 unsigned grp_to_be_replaced : 1;
250
251 /* Set when we want a replacement for the sole purpose of having it in
252 generated debug statements. */
253 unsigned grp_to_be_debug_replaced : 1;
254
255 /* Should TREE_NO_WARNING of a replacement be set? */
256 unsigned grp_no_warning : 1;
257
258 /* Is it possible that the group refers to data which might be (directly or
259 otherwise) modified? */
260 unsigned grp_maybe_modified : 1;
261
262 /* Set when this is a representative of a pointer to scalar (i.e. by
263 reference) parameter which we consider for turning into a plain scalar
264 (i.e. a by value parameter). */
265 unsigned grp_scalar_ptr : 1;
266
267 /* Set when we discover that this pointer is not safe to dereference in the
268 caller. */
269 unsigned grp_not_necessarilly_dereferenced : 1;
270 };
271
272 typedef struct access *access_p;
273
274
275 /* Alloc pool for allocating access structures. */
276 static object_allocator<struct access> access_pool ("SRA accesses");
277
278 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
279 are used to propagate subaccesses from rhs to lhs as long as they don't
280 conflict with what is already there. */
281 struct assign_link
282 {
283 struct access *lacc, *racc;
284 struct assign_link *next;
285 };
286
287 /* Alloc pool for allocating assign link structures. */
288 static object_allocator<assign_link> assign_link_pool ("SRA links");
289
290 /* Base (tree) -> Vector (vec<access_p> *) map. */
291 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
292
293 /* Candidate hash table helpers. */
294
295 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
296 {
297 static inline hashval_t hash (const tree_node *);
298 static inline bool equal (const tree_node *, const tree_node *);
299 };
300
301 /* Hash a tree in a uid_decl_map. */
302
303 inline hashval_t
304 uid_decl_hasher::hash (const tree_node *item)
305 {
306 return item->decl_minimal.uid;
307 }
308
309 /* Return true if the DECL_UIDs in both trees are equal. */
310
311 inline bool
312 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
313 {
314 return (a->decl_minimal.uid == b->decl_minimal.uid);
315 }
316
317 /* Set of candidates. */
318 static bitmap candidate_bitmap;
319 static hash_table<uid_decl_hasher> *candidates;
320
321 /* For a candidate UID, return the candidate's decl. */
322
323 static inline tree
324 candidate (unsigned uid)
325 {
326 tree_node t;
327 t.decl_minimal.uid = uid;
328 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
329 }
330
331 /* Bitmap of candidates which we should try to entirely scalarize away and
332 those which cannot be (because they are and need to be used as a whole). */
333 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
334
335 /* Bitmap of candidates in the constant pool, which cannot be scalarized
336 because this would produce non-constant expressions (e.g. Ada). */
337 static bitmap disqualified_constants;
338
339 /* Obstack for creation of fancy names. */
340 static struct obstack name_obstack;
341
342 /* Head of a linked list of accesses that need to have their subaccesses
343 propagated to their assignment counterparts. */
344 static struct access *work_queue_head;
345
346 /* Number of parameters of the analyzed function when doing early ipa SRA. */
347 static int func_param_count;
348
349 /* scan_function sets the following to true if it encounters a call to
350 __builtin_apply_args. */
351 static bool encountered_apply_args;
352
353 /* Set by scan_function when it finds a recursive call. */
354 static bool encountered_recursive_call;
355
356 /* Set by scan_function when it finds a recursive call with fewer actual
357 arguments than formal parameters. */
358 static bool encountered_unchangable_recursive_call;
359
360 /* This is a table in which for each basic block and parameter there is a
361 distance (offset + size) in that parameter which is dereferenced and
362 accessed in that BB. */
363 static HOST_WIDE_INT *bb_dereferences;
364 /* Bitmap of BBs that can cause the function to "stop" progressing by
365 returning, throwing externally, looping infinitely or calling a function
366 which might abort, etc. */
367 static bitmap final_bbs;
368
369 /* Representative of no accesses at all. */
370 static struct access no_accesses_representant;
371
372 /* Predicate to test the special value. */
373
374 static inline bool
375 no_accesses_p (struct access *access)
376 {
377 return access == &no_accesses_representant;
378 }
379
380 /* Statistics about the various actions taken by the SRA pass, gathered
381 while it runs. */
383
384 static struct
385 {
386 /* Number of processed aggregates is readily available in
387 analyze_all_variable_accesses and so is not stored here. */
388
389 /* Number of created scalar replacements. */
390 int replacements;
391
392 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
393 expression. */
394 int exprs;
395
396 /* Number of statements created by generate_subtree_copies. */
397 int subtree_copies;
398
399 /* Number of statements created by load_assign_lhs_subreplacements. */
400 int subreplacements;
401
402 /* Number of times sra_modify_assign has deleted a statement. */
403 int deleted;
404
405 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
406 RHS separately due to type conversions or nonexistent matching
407 references. */
408 int separate_lhs_rhs_handling;
409
410 /* Number of parameters that were removed because they were unused. */
411 int deleted_unused_parameters;
412
413 /* Number of scalars passed as parameters by reference that have been
414 converted to be passed by value. */
415 int scalar_by_ref_to_by_val;
416
417 /* Number of aggregate parameters that were replaced by one or more of their
418 components. */
419 int aggregate_params_reduced;
420
421 /* Number of components created when splitting aggregate parameters. */
422 int param_reductions_created;
423 } sra_stats;
424
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are. */

425 static void
426 dump_access (FILE *f, struct access *access, bool grp)
427 {
428 fprintf (f, "access { ");
429 fprintf (f, "base = (%d)'", DECL_UID (access->base));
430 print_generic_expr (f, access->base);
431 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
432 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
433 fprintf (f, ", expr = ");
434 print_generic_expr (f, access->expr);
435 fprintf (f, ", type = ");
436 print_generic_expr (f, access->type);
437 fprintf (f, ", non_addressable = %d, reverse = %d",
438 access->non_addressable, access->reverse);
439 if (grp)
440 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
441 "grp_assignment_write = %d, grp_scalar_read = %d, "
442 "grp_scalar_write = %d, grp_total_scalarization = %d, "
443 "grp_hint = %d, grp_covered = %d, "
444 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
445 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
446 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
447 "grp_not_necessarilly_dereferenced = %d\n",
448 access->grp_read, access->grp_write, access->grp_assignment_read,
449 access->grp_assignment_write, access->grp_scalar_read,
450 access->grp_scalar_write, access->grp_total_scalarization,
451 access->grp_hint, access->grp_covered,
452 access->grp_unscalarizable_region, access->grp_unscalarized_data,
453 access->grp_partial_lhs, access->grp_to_be_replaced,
454 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
455 access->grp_not_necessarilly_dereferenced);
456 else
457 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
458 "grp_partial_lhs = %d\n",
459 access->write, access->grp_total_scalarization,
460 access->grp_partial_lhs);
461 }
462
463 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
464
465 static void
466 dump_access_tree_1 (FILE *f, struct access *access, int level)
467 {
468 do
469 {
470 int i;
471
472 for (i = 0; i < level; i++)
473 fputs ("* ", dump_file);
474
475 dump_access (f, access, true);
476
477 if (access->first_child)
478 dump_access_tree_1 (f, access->first_child, level + 1);
479
480 access = access->next_sibling;
481 }
482 while (access);
483 }
484
485 /* Dump all access trees for a variable, given the pointer to the first root in
486 ACCESS. */
487
488 static void
489 dump_access_tree (FILE *f, struct access *access)
490 {
491 for (; access; access = access->next_grp)
492 dump_access_tree_1 (f, access, 0);
493 }
494
495 /* Return true iff ACC is non-NULL and has subaccesses. */
496
497 static inline bool
498 access_has_children_p (struct access *acc)
499 {
500 return acc && acc->first_child;
501 }
502
503 /* Return true iff ACC is (partly) covered by at least one replacement. */
504
505 static bool
506 access_has_replacements_p (struct access *acc)
507 {
508 struct access *child;
509 if (acc->grp_to_be_replaced)
510 return true;
511 for (child = acc->first_child; child; child = child->next_sibling)
512 if (access_has_replacements_p (child))
513 return true;
514 return false;
515 }
516
517 /* Return a vector of pointers to accesses for the variable given in BASE or
518 NULL if there is none. */
519
520 static vec<access_p> *
521 get_base_access_vector (tree base)
522 {
523 return base_access_vec->get (base);
524 }
525
526 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
527 in ACCESS. Return NULL if it cannot be found. */
528
529 static struct access *
530 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
531 HOST_WIDE_INT size)
532 {
533 while (access && (access->offset != offset || access->size != size))
534 {
535 struct access *child = access->first_child;
536
537 while (child && (child->offset + child->size <= offset))
538 child = child->next_sibling;
539 access = child;
540 }
541
542 return access;
543 }
544
545 /* Return the first group representative for DECL or NULL if none exists. */
546
547 static struct access *
548 get_first_repr_for_decl (tree base)
549 {
550 vec<access_p> *access_vec;
551
552 access_vec = get_base_access_vector (base);
553 if (!access_vec)
554 return NULL;
555
556 return (*access_vec)[0];
557 }
558
559 /* Find an access representative for the variable BASE and given OFFSET and
560 SIZE. Requires that access trees have already been built. Return NULL if
561 it cannot be found. */
562
563 static struct access *
564 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
565 HOST_WIDE_INT size)
566 {
567 struct access *access;
568
569 access = get_first_repr_for_decl (base);
570 while (access && (access->offset + access->size <= offset))
571 access = access->next_grp;
572 if (!access)
573 return NULL;
574
575 return find_access_in_subtree (access, offset, size);
576 }
577
578 /* Add LINK to the linked list of assign links of RACC. */
579 static void
580 add_link_to_rhs (struct access *racc, struct assign_link *link)
581 {
582 gcc_assert (link->racc == racc);
583
584 if (!racc->first_link)
585 {
586 gcc_assert (!racc->last_link);
587 racc->first_link = link;
588 }
589 else
590 racc->last_link->next = link;
591
592 racc->last_link = link;
593 link->next = NULL;
594 }
595
596 /* Move all link structures in their linked list in OLD_RACC to the linked list
597 in NEW_RACC. */
598 static void
599 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
600 {
601 if (!old_racc->first_link)
602 {
603 gcc_assert (!old_racc->last_link);
604 return;
605 }
606
607 if (new_racc->first_link)
608 {
609 gcc_assert (!new_racc->last_link->next);
610 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
611
612 new_racc->last_link->next = old_racc->first_link;
613 new_racc->last_link = old_racc->last_link;
614 }
615 else
616 {
617 gcc_assert (!new_racc->last_link);
618
619 new_racc->first_link = old_racc->first_link;
620 new_racc->last_link = old_racc->last_link;
621 }
622 old_racc->first_link = old_racc->last_link = NULL;
623 }
624
625 /* Add ACCESS to the work queue (which is actually a stack). */
626
627 static void
628 add_access_to_work_queue (struct access *access)
629 {
630 if (!access->grp_queued)
631 {
632 gcc_assert (!access->next_queued);
633 access->next_queued = work_queue_head;
634 access->grp_queued = 1;
635 work_queue_head = access;
636 }
637 }
638
639 /* Pop an access from the work queue, and return it, assuming there is one. */
640
641 static struct access *
642 pop_access_from_work_queue (void)
643 {
644 struct access *access = work_queue_head;
645
646 work_queue_head = access->next_queued;
647 access->next_queued = NULL;
648 access->grp_queued = 0;
649 return access;
650 }
651
652
653 /* Allocate necessary structures. */
654
655 static void
656 sra_initialize (void)
657 {
658 candidate_bitmap = BITMAP_ALLOC (NULL);
659 candidates = new hash_table<uid_decl_hasher>
660 (vec_safe_length (cfun->local_decls) / 2);
661 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
662 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
663 disqualified_constants = BITMAP_ALLOC (NULL);
664 gcc_obstack_init (&name_obstack);
665 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
666 memset (&sra_stats, 0, sizeof (sra_stats));
667 encountered_apply_args = false;
668 encountered_recursive_call = false;
669 encountered_unchangable_recursive_call = false;
670 }
671
672 /* Deallocate all general structures. */
673
674 static void
675 sra_deinitialize (void)
676 {
677 BITMAP_FREE (candidate_bitmap);
678 delete candidates;
679 candidates = NULL;
680 BITMAP_FREE (should_scalarize_away_bitmap);
681 BITMAP_FREE (cannot_scalarize_away_bitmap);
682 BITMAP_FREE (disqualified_constants);
683 access_pool.release ();
684 assign_link_pool.release ();
685 obstack_free (&name_obstack, NULL);
686
687 delete base_access_vec;
688 }
689
690 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
691
692 static bool constant_decl_p (tree decl)
693 {
694 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
695 }
696
697
698 /* Mark LHS of assign links out of ACCESS and its children as written to. */
699
700 static void
701 process_subtree_disqualification (struct access *access)
702 {
703 struct access *child;
704 for (struct assign_link *link = access->first_link; link; link = link->next)
705 link->lacc->grp_write = true;
706 for (child = access->first_child; child; child = child->next_sibling)
707 process_subtree_disqualification (child);
708 }
709
710 /* Remove DECL from candidates for SRA and write REASON to the dump file if
711 there is one. */
712 static void
713 disqualify_candidate (tree decl, const char *reason)
714 {
715 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
716 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
717 if (constant_decl_p (decl))
718 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
719
720 if (dump_file && (dump_flags & TDF_DETAILS))
721 {
722 fprintf (dump_file, "! Disqualifying ");
723 print_generic_expr (dump_file, decl);
724 fprintf (dump_file, " - %s\n", reason);
725 }
726
727 struct access *access = get_first_repr_for_decl (decl);
728 while (access)
729 {
730 process_subtree_disqualification (access);
731 access = access->next_grp;
732 }
733 }
734
735 /* Return true iff the type contains a field or an element which does not allow
736 scalarization. */
737
738 static bool
739 type_internals_preclude_sra_p (tree type, const char **msg)
740 {
741 tree fld;
742 tree et;
743
744 switch (TREE_CODE (type))
745 {
746 case RECORD_TYPE:
747 case UNION_TYPE:
748 case QUAL_UNION_TYPE:
749 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
750 if (TREE_CODE (fld) == FIELD_DECL)
751 {
752 tree ft = TREE_TYPE (fld);
753
754 if (TREE_THIS_VOLATILE (fld))
755 {
756 *msg = "volatile structure field";
757 return true;
758 }
759 if (!DECL_FIELD_OFFSET (fld))
760 {
761 *msg = "no structure field offset";
762 return true;
763 }
764 if (!DECL_SIZE (fld))
765 {
766 *msg = "zero structure field size";
767 return true;
768 }
769 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
770 {
771 *msg = "structure field offset not fixed";
772 return true;
773 }
774 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
775 {
776 *msg = "structure field size not fixed";
777 return true;
778 }
779 if (!tree_fits_shwi_p (bit_position (fld)))
780 {
781 *msg = "structure field size too big";
782 return true;
783 }
784 if (AGGREGATE_TYPE_P (ft)
785 && int_bit_position (fld) % BITS_PER_UNIT != 0)
786 {
787 *msg = "structure field is bit field";
788 return true;
789 }
790
791 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
792 return true;
793 }
794
795 return false;
796
797 case ARRAY_TYPE:
798 et = TREE_TYPE (type);
799
800 if (TYPE_VOLATILE (et))
801 {
802 *msg = "element type is volatile";
803 return true;
804 }
805
806 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
807 return true;
808
809 return false;
810
811 default:
812 return false;
813 }
814 }
815
816 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
817 base variable if it is. Return T if it is not an SSA_NAME. */
818
819 static tree
820 get_ssa_base_param (tree t)
821 {
822 if (TREE_CODE (t) == SSA_NAME)
823 {
824 if (SSA_NAME_IS_DEFAULT_DEF (t))
825 return SSA_NAME_VAR (t);
826 else
827 return NULL_TREE;
828 }
829 return t;
830 }
831
832 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
833 belongs to, unless the BB has already been marked as potentially
834 final. */
835
836 static void
837 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
838 {
839 basic_block bb = gimple_bb (stmt);
840 int idx, parm_index = 0;
841 tree parm;
842
843 if (bitmap_bit_p (final_bbs, bb->index))
844 return;
845
846 for (parm = DECL_ARGUMENTS (current_function_decl);
847 parm && parm != base;
848 parm = DECL_CHAIN (parm))
849 parm_index++;
850
851 gcc_assert (parm_index < func_param_count);
852
853 idx = bb->index * func_param_count + parm_index;
854 if (bb_dereferences[idx] < dist)
855 bb_dereferences[idx] = dist;
856 }
857
858 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
859 the three fields. Also add it to the vector of accesses corresponding to
860 the base. Finally, return the new access. */
861
862 static struct access *
863 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
864 {
865 struct access *access = access_pool.allocate ();
866
867 memset (access, 0, sizeof (struct access));
868 access->base = base;
869 access->offset = offset;
870 access->size = size;
871
872 base_access_vec->get_or_insert (base).safe_push (access);
873
874 return access;
875 }
876
877 static bool maybe_add_sra_candidate (tree);
878
879 /* Create and insert access for EXPR. Return created access, or NULL if it is
880 not possible. Also scan for uses of the constant pool as we go along and
881 add them to candidates. */
882
883 static struct access *
884 create_access (tree expr, gimple *stmt, bool write)
885 {
886 struct access *access;
887 HOST_WIDE_INT offset, size, max_size;
888 tree base = expr;
889 bool reverse, ptr, unscalarizable_region = false;
890
891 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
892
893 if (sra_mode == SRA_MODE_EARLY_IPA
894 && TREE_CODE (base) == MEM_REF)
895 {
896 base = get_ssa_base_param (TREE_OPERAND (base, 0));
897 if (!base)
898 return NULL;
899 ptr = true;
900 }
901 else
902 ptr = false;
903
904 /* For constant-pool entries, check we can substitute the constant value. */
905 if (constant_decl_p (base)
906 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
907 {
908 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
909 if (expr != base
910 && !is_gimple_reg_type (TREE_TYPE (expr))
911 && dump_file && (dump_flags & TDF_DETAILS))
912 {
913 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
914 and elements of multidimensional arrays (which are
915 multi-element arrays in their own right). */
916 fprintf (dump_file, "Allowing non-reg-type load of part"
917 " of constant-pool entry: ");
918 print_generic_expr (dump_file, expr);
919 }
920 maybe_add_sra_candidate (base);
921 }
922
923 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
924 return NULL;
925
926 if (sra_mode == SRA_MODE_EARLY_IPA)
927 {
928 if (size < 0 || size != max_size)
929 {
930 disqualify_candidate (base, "Encountered a variable sized access.");
931 return NULL;
932 }
933 if (TREE_CODE (expr) == COMPONENT_REF
934 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
935 {
936 disqualify_candidate (base, "Encountered a bit-field access.");
937 return NULL;
938 }
939 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
940
941 if (ptr)
942 mark_parm_dereference (base, offset + size, stmt);
943 }
944 else
945 {
946 if (size != max_size)
947 {
948 size = max_size;
949 unscalarizable_region = true;
950 }
951 if (size < 0)
952 {
953 disqualify_candidate (base, "Encountered an unconstrained access.");
954 return NULL;
955 }
956 }
957
958 access = create_access_1 (base, offset, size);
959 access->expr = expr;
960 access->type = TREE_TYPE (expr);
961 access->write = write;
962 access->grp_unscalarizable_region = unscalarizable_region;
963 access->stmt = stmt;
964 access->reverse = reverse;
965
966 if (TREE_CODE (expr) == COMPONENT_REF
967 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
968 access->non_addressable = 1;
969
970 return access;
971 }
972
973
974 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
975 ARRAY_TYPE with fields that are either of gimple register types (excluding
976 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
977 we are considering a decl from the constant pool. If it is false, char arrays
978 will be refused. */
979
980 static bool
981 scalarizable_type_p (tree type, bool const_decl)
982 {
983 gcc_assert (!is_gimple_reg_type (type));
984 if (type_contains_placeholder_p (type))
985 return false;
986
987 switch (TREE_CODE (type))
988 {
989 case RECORD_TYPE:
990 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
991 if (TREE_CODE (fld) == FIELD_DECL)
992 {
993 tree ft = TREE_TYPE (fld);
994
995 if (DECL_BIT_FIELD (fld))
996 return false;
997
998 if (!is_gimple_reg_type (ft)
999 && !scalarizable_type_p (ft, const_decl))
1000 return false;
1001 }
1002
1003 return true;
1004
1005 case ARRAY_TYPE:
1006 {
1007 HOST_WIDE_INT min_elem_size;
1008 if (const_decl)
1009 min_elem_size = 0;
1010 else
1011 min_elem_size = BITS_PER_UNIT;
1012
1013 if (TYPE_DOMAIN (type) == NULL_TREE
1014 || !tree_fits_shwi_p (TYPE_SIZE (type))
1015 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1016 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1017 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1018 return false;
1019 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1020 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1021 /* Zero-element array, should not prevent scalarization. */
1022 ;
1023 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1024 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1025 /* Variable-length array, do not allow scalarization. */
1026 return false;
1027
1028 tree elem = TREE_TYPE (type);
1029 if (!is_gimple_reg_type (elem)
1030 && !scalarizable_type_p (elem, const_decl))
1031 return false;
1032 return true;
1033 }
1034 default:
1035 return false;
1036 }
1037 }
1038
1039 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1040
1041 /* Create total_scalarization accesses for all scalar fields of a member
1042 of type DECL_TYPE conforming to scalarizable_type_p. BASE
1043 must be the top-most VAR_DECL representing the variable; within that,
1044 OFFSET locates the member and REF must be the memory reference expression for
1045 the member. */
1046
1047 static void
1048 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1049 {
1050 switch (TREE_CODE (decl_type))
1051 {
1052 case RECORD_TYPE:
1053 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1054 if (TREE_CODE (fld) == FIELD_DECL)
1055 {
1056 HOST_WIDE_INT pos = offset + int_bit_position (fld);
1057 tree ft = TREE_TYPE (fld);
1058 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1059
1060 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1061 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1062 nref, ft);
1063 }
1064 break;
1065 case ARRAY_TYPE:
1066 {
1067 tree elemtype = TREE_TYPE (decl_type);
1068 tree elem_size = TYPE_SIZE (elemtype);
1069 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1070 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1071 gcc_assert (el_size > 0);
1072
1073 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1074 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1075 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1076 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1077 if (maxidx)
1078 {
1079 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1080 tree domain = TYPE_DOMAIN (decl_type);
1081 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1082 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1083 offset_int idx = wi::to_offset (minidx);
1084 offset_int max = wi::to_offset (maxidx);
1085 if (!TYPE_UNSIGNED (domain))
1086 {
1087 idx = wi::sext (idx, TYPE_PRECISION (domain));
1088 max = wi::sext (max, TYPE_PRECISION (domain));
1089 }
1090 for (int el_off = offset; idx <= max; ++idx)
1091 {
1092 tree nref = build4 (ARRAY_REF, elemtype,
1093 ref,
1094 wide_int_to_tree (domain, idx),
1095 NULL_TREE, NULL_TREE);
1096 scalarize_elem (base, el_off, el_size,
1097 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1098 nref, elemtype);
1099 el_off += el_size;
1100 }
1101 }
1102 }
1103 break;
1104 default:
1105 gcc_unreachable ();
1106 }
1107 }
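
/* As an illustration (assuming 32-bit int and float), completely
   scalarizing a variable of type

     struct V { int a[2]; float f; };

   creates total-scalarization accesses for the elements a[0] and a[1] at
   bit offsets 0 and 32 and for f at bit offset 64, each 32 bits wide.  */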
1108
1109 /* Create total_scalarization accesses for a member of type TYPE, which must
1110 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1111 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1112 the member, REVERSE gives its storage order, and REF must be the reference
1113 expression for it. */
1114
1115 static void
1116 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1117 tree ref, tree type)
1118 {
1119 if (is_gimple_reg_type (type))
1120 {
1121 struct access *access = create_access_1 (base, pos, size);
1122 access->expr = ref;
1123 access->type = type;
1124 access->grp_total_scalarization = 1;
1125 access->reverse = reverse;
1126 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1127 }
1128 else
1129 completely_scalarize (base, type, pos, ref);
1130 }
1131
1132 /* Create a total_scalarization access for VAR as a whole. VAR must be of a
1133 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1134
1135 static void
1136 create_total_scalarization_access (tree var)
1137 {
1138 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1139 struct access *access;
1140
1141 access = create_access_1 (var, 0, size);
1142 access->expr = var;
1143 access->type = TREE_TYPE (var);
1144 access->grp_total_scalarization = 1;
1145 }
1146
1147 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1148
1149 static inline bool
1150 contains_view_convert_expr_p (const_tree ref)
1151 {
1152 while (handled_component_p (ref))
1153 {
1154 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1155 return true;
1156 ref = TREE_OPERAND (ref, 0);
1157 }
1158
1159 return false;
1160 }
1161
1162 /* Search the given tree for a declaration by skipping handled components and
1163 exclude it from the candidates. */
1164
1165 static void
1166 disqualify_base_of_expr (tree t, const char *reason)
1167 {
1168 t = get_base_address (t);
1169 if (sra_mode == SRA_MODE_EARLY_IPA
1170 && TREE_CODE (t) == MEM_REF)
1171 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1172
1173 if (t && DECL_P (t))
1174 disqualify_candidate (t, reason);
1175 }
1176
1177 /* Scan expression EXPR and create access structures for all accesses to
1178 candidates for scalarization. Return the created access or NULL if none is
1179 created. */
1180
1181 static struct access *
1182 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1183 {
1184 struct access *ret = NULL;
1185 bool partial_ref;
1186
1187 if (TREE_CODE (expr) == BIT_FIELD_REF
1188 || TREE_CODE (expr) == IMAGPART_EXPR
1189 || TREE_CODE (expr) == REALPART_EXPR)
1190 {
1191 expr = TREE_OPERAND (expr, 0);
1192 partial_ref = true;
1193 }
1194 else
1195 partial_ref = false;
1196
1197 /* We need to dive through V_C_Es in order to get the size of the operand
1198 and not the result type. Ada produces such statements. We are also
1199 capable of handling the topmost V_C_E but not any of those buried in other
1200 handled components. */
1201 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1202 expr = TREE_OPERAND (expr, 0);
1203
1204 if (contains_view_convert_expr_p (expr))
1205 {
1206 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1207 "component.");
1208 return NULL;
1209 }
1210 if (TREE_THIS_VOLATILE (expr))
1211 {
1212 disqualify_base_of_expr (expr, "part of a volatile reference.");
1213 return NULL;
1214 }
1215
1216 switch (TREE_CODE (expr))
1217 {
1218 case MEM_REF:
1219 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1220 && sra_mode != SRA_MODE_EARLY_IPA)
1221 return NULL;
1222 /* fall through */
1223 case VAR_DECL:
1224 case PARM_DECL:
1225 case RESULT_DECL:
1226 case COMPONENT_REF:
1227 case ARRAY_REF:
1228 case ARRAY_RANGE_REF:
1229 ret = create_access (expr, stmt, write);
1230 break;
1231
1232 default:
1233 break;
1234 }
1235
1236 if (write && partial_ref && ret)
1237 ret->grp_partial_lhs = 1;
1238
1239 return ret;
1240 }
1241
1242 /* Scan expression EXPR and create access structures for all accesses to
1243 candidates for scalarization. Return true if any access has been inserted.
1244 STMT must be the statement from which the expression is taken, WRITE must be
1245 true if the expression is a store and false otherwise. */
1246
1247 static bool
1248 build_access_from_expr (tree expr, gimple *stmt, bool write)
1249 {
1250 struct access *access;
1251
1252 access = build_access_from_expr_1 (expr, stmt, write);
1253 if (access)
1254 {
1255 /* This means the aggregate is accessed as a whole in a way other than an
1256 assign statement and thus cannot be removed even if we had a scalar
1257 replacement for everything. */
1258 if (cannot_scalarize_away_bitmap)
1259 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1260 return true;
1261 }
1262 return false;
1263 }
1264
1265 /* Return the single non-EH successor edge of BB or NULL if there is none or
1266 more than one. */
1267
1268 static edge
1269 single_non_eh_succ (basic_block bb)
1270 {
1271 edge e, res = NULL;
1272 edge_iterator ei;
1273
1274 FOR_EACH_EDGE (e, ei, bb->succs)
1275 if (!(e->flags & EDGE_EH))
1276 {
1277 if (res)
1278 return NULL;
1279 res = e;
1280 }
1281
1282 return res;
1283 }
1284
1285 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1286 there is no alternative spot to put statements that SRA might need to
1287 generate after it. The spot we are looking for is an edge leading to a
1288 single non-EH successor, if it exists and is indeed single. RHS may be
1289 NULL, in that case ignore it. */
1290
1291 static bool
1292 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1293 {
1294 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1295 && stmt_ends_bb_p (stmt))
1296 {
1297 if (single_non_eh_succ (gimple_bb (stmt)))
1298 return false;
1299
1300 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1301 if (rhs)
1302 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1303 return true;
1304 }
1305 return false;
1306 }
1307
1308 /* Return true if the nature of BASE is such that it contains data even if
1309 there is no write to it in the function. */
1310
1311 static bool
1312 comes_initialized_p (tree base)
1313 {
1314 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1315 }
1316
1317 /* Scan expressions occurring in STMT, create access structures for all accesses
1318 to candidates for scalarization and remove those candidates which occur in
1319 statements or expressions that prevent them from being split apart. Return
1320 true if any access has been inserted. */
1321
1322 static bool
1323 build_accesses_from_assign (gimple *stmt)
1324 {
1325 tree lhs, rhs;
1326 struct access *lacc, *racc;
1327
1328 if (!gimple_assign_single_p (stmt)
1329 /* Scope clobbers don't influence scalarization. */
1330 || gimple_clobber_p (stmt))
1331 return false;
1332
1333 lhs = gimple_assign_lhs (stmt);
1334 rhs = gimple_assign_rhs1 (stmt);
1335
1336 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1337 return false;
1338
1339 racc = build_access_from_expr_1 (rhs, stmt, false);
1340 lacc = build_access_from_expr_1 (lhs, stmt, true);
1341
1342 if (lacc)
1343 {
1344 lacc->grp_assignment_write = 1;
1345 if (storage_order_barrier_p (rhs))
1346 lacc->grp_unscalarizable_region = 1;
1347 }
1348
1349 if (racc)
1350 {
1351 racc->grp_assignment_read = 1;
1352 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1353 && !is_gimple_reg_type (racc->type))
1354 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1355 if (storage_order_barrier_p (lhs))
1356 racc->grp_unscalarizable_region = 1;
1357 }
1358
1359 if (lacc && racc
1360 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1361 && !lacc->grp_unscalarizable_region
1362 && !racc->grp_unscalarizable_region
1363 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1364 && lacc->size == racc->size
1365 && useless_type_conversion_p (lacc->type, racc->type))
1366 {
1367 struct assign_link *link;
1368
1369 link = assign_link_pool.allocate ();
1370 memset (link, 0, sizeof (struct assign_link));
1371
1372 link->lacc = lacc;
1373 link->racc = racc;
1374 add_link_to_rhs (racc, link);
1375 /* Let's delay marking the areas as written until propagation of accesses
1376 across link, unless the nature of rhs tells us that its data comes
1377 from elsewhere. */
1378 if (!comes_initialized_p (racc->base))
1379 lacc->write = false;
1380 }
1381
1382 return lacc || racc;
1383 }
1384
1385 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1386 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1387
1388 static bool
1389 asm_visit_addr (gimple *, tree op, tree, void *)
1390 {
1391 op = get_base_address (op);
1392 if (op
1393 && DECL_P (op))
1394 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1395
1396 return false;
1397 }
1398
1399 /* Return true iff callsite CALL has at least as many actual arguments as there
1400 are formal parameters of the function currently processed by IPA-SRA and
1401 their types match. */
1402
1403 static inline bool
1404 callsite_arguments_match_p (gimple *call)
1405 {
1406 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1407 return false;
1408
1409 tree parm;
1410 int i;
1411 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1412 parm;
1413 parm = DECL_CHAIN (parm), i++)
1414 {
1415 tree arg = gimple_call_arg (call, i);
1416 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1417 return false;
1418 }
1419 return true;
1420 }
1421
1422 /* Scan function and look for interesting expressions and create access
1423 structures for them. Return true iff any access is created. */
1424
1425 static bool
1426 scan_function (void)
1427 {
1428 basic_block bb;
1429 bool ret = false;
1430
1431 FOR_EACH_BB_FN (bb, cfun)
1432 {
1433 gimple_stmt_iterator gsi;
1434 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1435 {
1436 gimple *stmt = gsi_stmt (gsi);
1437 tree t;
1438 unsigned i;
1439
1440 if (final_bbs && stmt_can_throw_external (stmt))
1441 bitmap_set_bit (final_bbs, bb->index);
1442 switch (gimple_code (stmt))
1443 {
1444 case GIMPLE_RETURN:
1445 t = gimple_return_retval (as_a <greturn *> (stmt));
1446 if (t != NULL_TREE)
1447 ret |= build_access_from_expr (t, stmt, false);
1448 if (final_bbs)
1449 bitmap_set_bit (final_bbs, bb->index);
1450 break;
1451
1452 case GIMPLE_ASSIGN:
1453 ret |= build_accesses_from_assign (stmt);
1454 break;
1455
1456 case GIMPLE_CALL:
1457 for (i = 0; i < gimple_call_num_args (stmt); i++)
1458 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1459 stmt, false);
1460
1461 if (sra_mode == SRA_MODE_EARLY_IPA)
1462 {
1463 tree dest = gimple_call_fndecl (stmt);
1464 int flags = gimple_call_flags (stmt);
1465
1466 if (dest)
1467 {
1468 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1469 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1470 encountered_apply_args = true;
1471 if (recursive_call_p (current_function_decl, dest))
1472 {
1473 encountered_recursive_call = true;
1474 if (!callsite_arguments_match_p (stmt))
1475 encountered_unchangable_recursive_call = true;
1476 }
1477 }
1478
1479 if (final_bbs
1480 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1481 bitmap_set_bit (final_bbs, bb->index);
1482 }
1483
1484 t = gimple_call_lhs (stmt);
1485 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1486 ret |= build_access_from_expr (t, stmt, true);
1487 break;
1488
1489 case GIMPLE_ASM:
1490 {
1491 gasm *asm_stmt = as_a <gasm *> (stmt);
1492 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1493 asm_visit_addr);
1494 if (final_bbs)
1495 bitmap_set_bit (final_bbs, bb->index);
1496
1497 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1498 {
1499 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1500 ret |= build_access_from_expr (t, asm_stmt, false);
1501 }
1502 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1503 {
1504 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1505 ret |= build_access_from_expr (t, asm_stmt, true);
1506 }
1507 }
1508 break;
1509
1510 default:
1511 break;
1512 }
1513 }
1514 }
1515
1516 return ret;
1517 }
1518
1519 /* Helper of QSORT function. There are pointers to accesses in the array. An
1520 access is considered smaller than another if it has a smaller offset or if
1521 the offsets are the same but its size is bigger. */
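
/* For example, accesses with (offset, size) pairs (0, 64), (0, 32) and
   (32, 32) compare into exactly that order: at equal offsets the larger
   access goes first, so that an enclosing access always precedes the
   accesses nested within it.  */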
1522
1523 static int
1524 compare_access_positions (const void *a, const void *b)
1525 {
1526 const access_p *fp1 = (const access_p *) a;
1527 const access_p *fp2 = (const access_p *) b;
1528 const access_p f1 = *fp1;
1529 const access_p f2 = *fp2;
1530
1531 if (f1->offset != f2->offset)
1532 return f1->offset < f2->offset ? -1 : 1;
1533
1534 if (f1->size == f2->size)
1535 {
1536 if (f1->type == f2->type)
1537 return 0;
1538 /* Put any non-aggregate type before any aggregate type. */
1539 else if (!is_gimple_reg_type (f1->type)
1540 && is_gimple_reg_type (f2->type))
1541 return 1;
1542 else if (is_gimple_reg_type (f1->type)
1543 && !is_gimple_reg_type (f2->type))
1544 return -1;
1545 /* Put any complex or vector type before any other scalar type. */
1546 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1547 && TREE_CODE (f1->type) != VECTOR_TYPE
1548 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1549 || TREE_CODE (f2->type) == VECTOR_TYPE))
1550 return 1;
1551 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1552 || TREE_CODE (f1->type) == VECTOR_TYPE)
1553 && TREE_CODE (f2->type) != COMPLEX_TYPE
1554 && TREE_CODE (f2->type) != VECTOR_TYPE)
1555 return -1;
1556 /* Put the integral type with the bigger precision first. */
1557 else if (INTEGRAL_TYPE_P (f1->type)
1558 && INTEGRAL_TYPE_P (f2->type))
1559 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1560 /* Put any integral type with non-full precision last. */
1561 else if (INTEGRAL_TYPE_P (f1->type)
1562 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1563 != TYPE_PRECISION (f1->type)))
1564 return 1;
1565 else if (INTEGRAL_TYPE_P (f2->type)
1566 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1567 != TYPE_PRECISION (f2->type)))
1568 return -1;
1569 /* Stabilize the sort. */
1570 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1571 }
1572
1573 /* We want the bigger accesses first, thus the opposite operator in the next
1574 line: */
1575 return f1->size > f2->size ? -1 : 1;
1576 }
1577
1578
1579 /* Append a name of the declaration to the name obstack. A helper function for
1580 make_fancy_name. */
1581
1582 static void
1583 make_fancy_decl_name (tree decl)
1584 {
1585 char buffer[32];
1586
1587 tree name = DECL_NAME (decl);
1588 if (name)
1589 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1590 IDENTIFIER_LENGTH (name));
1591 else
1592 {
1593 sprintf (buffer, "D%u", DECL_UID (decl));
1594 obstack_grow (&name_obstack, buffer, strlen (buffer));
1595 }
1596 }
1597
1598 /* Helper for make_fancy_name. */
1599
1600 static void
1601 make_fancy_name_1 (tree expr)
1602 {
1603 char buffer[32];
1604 tree index;
1605
1606 if (DECL_P (expr))
1607 {
1608 make_fancy_decl_name (expr);
1609 return;
1610 }
1611
1612 switch (TREE_CODE (expr))
1613 {
1614 case COMPONENT_REF:
1615 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1616 obstack_1grow (&name_obstack, '$');
1617 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1618 break;
1619
1620 case ARRAY_REF:
1621 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1622 obstack_1grow (&name_obstack, '$');
1623 /* Arrays with only one element may not have a constant as their
1624 index. */
1625 index = TREE_OPERAND (expr, 1);
1626 if (TREE_CODE (index) != INTEGER_CST)
1627 break;
1628 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1629 obstack_grow (&name_obstack, buffer, strlen (buffer));
1630 break;
1631
1632 case ADDR_EXPR:
1633 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1634 break;
1635
1636 case MEM_REF:
1637 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1638 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1639 {
1640 obstack_1grow (&name_obstack, '$');
1641 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1642 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1643 obstack_grow (&name_obstack, buffer, strlen (buffer));
1644 }
1645 break;
1646
1647 case BIT_FIELD_REF:
1648 case REALPART_EXPR:
1649 case IMAGPART_EXPR:
1650 gcc_unreachable (); /* we treat these as scalars. */
1651 break;
1652 default:
1653 break;
1654 }
1655 }
1656
1657 /* Create a human readable name for a replacement variable based on EXPR. */
1658
1659 static char *
1660 make_fancy_name (tree expr)
1661 {
1662 make_fancy_name_1 (expr);
1663 obstack_1grow (&name_obstack, '\0');
1664 return XOBFINISH (&name_obstack, char *);
1665 }
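
/* For instance, for an access expression like a.b[2].c the functions above
   produce the name "a$b$2$c" (provided the declarations involved have names
   and the array index is a constant), which is then used to give the scalar
   replacement a human readable identifier.  */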
1666
1667 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1668 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1669 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1670 be non-NULL and is used to insert new statements either before or below
1671 the current one as specified by INSERT_AFTER. This function is not capable
1672 of handling bitfields. */
1673
1674 tree
1675 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1676 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1677 bool insert_after)
1678 {
1679 tree prev_base = base;
1680 tree off;
1681 tree mem_ref;
1682 HOST_WIDE_INT base_offset;
1683 unsigned HOST_WIDE_INT misalign;
1684 unsigned int align;
1685
1686 /* Preserve address-space information. */
1687 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1688 if (as != TYPE_ADDR_SPACE (exp_type))
1689 exp_type = build_qualified_type (exp_type,
1690 TYPE_QUALS (exp_type)
1691 | ENCODE_QUAL_ADDR_SPACE (as));
1692
1693 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1694 get_object_alignment_1 (base, &align, &misalign);
1695 base = get_addr_base_and_unit_offset (base, &base_offset);
1696
1697 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1698 offset such as array[var_index]. */
1699 if (!base)
1700 {
1701 gassign *stmt;
1702 tree tmp, addr;
1703
1704 gcc_checking_assert (gsi);
1705 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1706 addr = build_fold_addr_expr (unshare_expr (prev_base));
1707 STRIP_USELESS_TYPE_CONVERSION (addr);
1708 stmt = gimple_build_assign (tmp, addr);
1709 gimple_set_location (stmt, loc);
1710 if (insert_after)
1711 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1712 else
1713 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1714
1715 off = build_int_cst (reference_alias_ptr_type (prev_base),
1716 offset / BITS_PER_UNIT);
1717 base = tmp;
1718 }
1719 else if (TREE_CODE (base) == MEM_REF)
1720 {
1721 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1722 base_offset + offset / BITS_PER_UNIT);
1723 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1724 base = unshare_expr (TREE_OPERAND (base, 0));
1725 }
1726 else
1727 {
1728 off = build_int_cst (reference_alias_ptr_type (prev_base),
1729 base_offset + offset / BITS_PER_UNIT);
1730 base = build_fold_addr_expr (unshare_expr (base));
1731 }
1732
1733 misalign = (misalign + offset) & (align - 1);
1734 if (misalign != 0)
1735 align = least_bit_hwi (misalign);
1736 if (align != TYPE_ALIGN (exp_type))
1737 exp_type = build_aligned_type (exp_type, align);
1738
1739 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1740 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1741 if (TREE_THIS_VOLATILE (prev_base))
1742 TREE_THIS_VOLATILE (mem_ref) = 1;
1743 if (TREE_SIDE_EFFECTS (prev_base))
1744 TREE_SIDE_EFFECTS (mem_ref) = 1;
1745 return mem_ref;
1746 }
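
/* For illustration, asking for a reference into an aggregate s at a bit
   offset of 32 with an int EXP_TYPE conceptually yields a MEM_REF that the
   gimple dumps would show as something like MEM[(int *)&s + 4B]; the exact
   form depends on whether the base is itself a MEM_REF and on the alignment
   that can be derived for it.  */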
1747
1748 /* Construct a memory reference to a part of an aggregate BASE at the given
1749 OFFSET and of the same type as MODEL. In case this is a reference to a
1750 bit-field, the function will replicate the last component_ref of model's
1751 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1752 build_ref_for_offset. */
1753
1754 static tree
1755 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1756 struct access *model, gimple_stmt_iterator *gsi,
1757 bool insert_after)
1758 {
1759 if (TREE_CODE (model->expr) == COMPONENT_REF
1760 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1761 {
1762 /* This access represents a bit-field. */
1763 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1764
1765 offset -= int_bit_position (fld);
1766 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1767 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1768 gsi, insert_after);
1769 /* The flag will be set on the record type. */
1770 REF_REVERSE_STORAGE_ORDER (t) = 0;
1771 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1772 NULL_TREE);
1773 }
1774 else
1775 return
1776 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1777 gsi, insert_after);
1778 }
1779
1780 /* Attempt to build a memory reference that we could put into a gimple
1781 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1782 create statements and returns NULL instead. This function also ignores
1783 alignment issues and so its results should never end up in non-debug
1784 statements. */
1785
1786 static tree
1787 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1788 struct access *model)
1789 {
1790 HOST_WIDE_INT base_offset;
1791 tree off;
1792
1793 if (TREE_CODE (model->expr) == COMPONENT_REF
1794 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1795 return NULL_TREE;
1796
1797 base = get_addr_base_and_unit_offset (base, &base_offset);
1798 if (!base)
1799 return NULL_TREE;
1800 if (TREE_CODE (base) == MEM_REF)
1801 {
1802 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1803 base_offset + offset / BITS_PER_UNIT);
1804 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1805 base = unshare_expr (TREE_OPERAND (base, 0));
1806 }
1807 else
1808 {
1809 off = build_int_cst (reference_alias_ptr_type (base),
1810 base_offset + offset / BITS_PER_UNIT);
1811 base = build_fold_addr_expr (unshare_expr (base));
1812 }
1813
1814 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1815 }
1816
1817 /* Construct a memory reference consisting of component_refs and array_refs to
1818 a part of an aggregate *RES (which is of type TYPE). The requested part
1819 should have type EXP_TYPE and be at the given OFFSET. This function might not
1820 succeed; it returns true when it does, and only then does *RES point to something
1821 meaningful. This function should be used only to build expressions that we
1822 might need to present to the user (e.g. in warnings). In all other situations,
1823 build_ref_for_model or build_ref_for_offset should be used instead. */
1824
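/* Worked example, added for exposition (the aggregate is made up and a
   32-bit int is assumed):  for

     struct { int a; struct { char c[4]; } in; } x;

   a call with OFFSET == 40 (bits) and EXP_TYPE == char descends through the
   COMPONENT_REFs and the ARRAY_REF and is expected to set *RES to the
   expression x.in.c[1].  */
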
1825 static bool
1826 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1827 tree exp_type)
1828 {
1829 while (1)
1830 {
1831 tree fld;
1832 tree tr_size, index, minidx;
1833 HOST_WIDE_INT el_size;
1834
1835 if (offset == 0 && exp_type
1836 && types_compatible_p (exp_type, type))
1837 return true;
1838
1839 switch (TREE_CODE (type))
1840 {
1841 case UNION_TYPE:
1842 case QUAL_UNION_TYPE:
1843 case RECORD_TYPE:
1844 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1845 {
1846 HOST_WIDE_INT pos, size;
1847 tree tr_pos, expr, *expr_ptr;
1848
1849 if (TREE_CODE (fld) != FIELD_DECL)
1850 continue;
1851
1852 tr_pos = bit_position (fld);
1853 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1854 continue;
1855 pos = tree_to_uhwi (tr_pos);
1856 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1857 tr_size = DECL_SIZE (fld);
1858 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1859 continue;
1860 size = tree_to_uhwi (tr_size);
1861 if (size == 0)
1862 {
1863 if (pos != offset)
1864 continue;
1865 }
1866 else if (pos > offset || (pos + size) <= offset)
1867 continue;
1868
1869 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1870 NULL_TREE);
1871 expr_ptr = &expr;
1872 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1873 offset - pos, exp_type))
1874 {
1875 *res = expr;
1876 return true;
1877 }
1878 }
1879 return false;
1880
1881 case ARRAY_TYPE:
1882 tr_size = TYPE_SIZE (TREE_TYPE (type));
1883 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1884 return false;
1885 el_size = tree_to_uhwi (tr_size);
1886
1887 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1888 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1889 return false;
1890 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1891 if (!integer_zerop (minidx))
1892 index = int_const_binop (PLUS_EXPR, index, minidx);
1893 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1894 NULL_TREE, NULL_TREE);
1895 offset = offset % el_size;
1896 type = TREE_TYPE (type);
1897 break;
1898
1899 default:
1900 if (offset != 0)
1901 return false;
1902
1903 if (exp_type)
1904 return false;
1905 else
1906 return true;
1907 }
1908 }
1909 }
1910
1911 /* Return true iff TYPE is a stdarg va_list type. */
1912
1913 static inline bool
1914 is_va_list_type (tree type)
1915 {
1916 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1917 }
1918
1919 /* Print message to dump file why a variable was rejected. */
1920
1921 static void
1922 reject (tree var, const char *msg)
1923 {
1924 if (dump_file && (dump_flags & TDF_DETAILS))
1925 {
1926 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1927 print_generic_expr (dump_file, var);
1928 fprintf (dump_file, "\n");
1929 }
1930 }
1931
1932 /* Return true if VAR is a candidate for SRA. */
1933
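/* For exposition, typical outcomes of the checks below (the declarations are
   made up):  a "volatile struct S vs;" is rejected as "is volatile", an
   aggregate whose address is taken is usually rejected as "needs to live in
   memory" (unless it is a constant-pool entry outside IPA SRA), and an
   ordinary fixed-size local aggregate passes and is recorded in
   candidate_bitmap and in the candidates hash table.  */
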
1934 static bool
1935 maybe_add_sra_candidate (tree var)
1936 {
1937 tree type = TREE_TYPE (var);
1938 const char *msg;
1939 tree_node **slot;
1940
1941 if (!AGGREGATE_TYPE_P (type))
1942 {
1943 reject (var, "not aggregate");
1944 return false;
1945 }
1946 /* Allow constant-pool entries (that "need to live in memory")
1947 unless we are doing IPA SRA. */
1948 if (needs_to_live_in_memory (var)
1949 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1950 {
1951 reject (var, "needs to live in memory");
1952 return false;
1953 }
1954 if (TREE_THIS_VOLATILE (var))
1955 {
1956 reject (var, "is volatile");
1957 return false;
1958 }
1959 if (!COMPLETE_TYPE_P (type))
1960 {
1961 reject (var, "has incomplete type");
1962 return false;
1963 }
1964 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1965 {
1966 reject (var, "type size not fixed");
1967 return false;
1968 }
1969 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1970 {
1971 reject (var, "type size is zero");
1972 return false;
1973 }
1974 if (type_internals_preclude_sra_p (type, &msg))
1975 {
1976 reject (var, msg);
1977 return false;
1978 }
1979 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1980 we also want to schedule it rather late. Thus we ignore it in
1981 the early pass. */
1982 (sra_mode == SRA_MODE_EARLY_INTRA
1983 && is_va_list_type (type)))
1984 {
1985 reject (var, "is va_list");
1986 return false;
1987 }
1988
1989 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1990 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1991 *slot = var;
1992
1993 if (dump_file && (dump_flags & TDF_DETAILS))
1994 {
1995 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1996 print_generic_expr (dump_file, var);
1997 fprintf (dump_file, "\n");
1998 }
1999
2000 return true;
2001 }
2002
2003 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2004 those declarations whose type is suitable for scalarization. */
2005
2006 static bool
2007 find_var_candidates (void)
2008 {
2009 tree var, parm;
2010 unsigned int i;
2011 bool ret = false;
2012
2013 for (parm = DECL_ARGUMENTS (current_function_decl);
2014 parm;
2015 parm = DECL_CHAIN (parm))
2016 ret |= maybe_add_sra_candidate (parm);
2017
2018 FOR_EACH_LOCAL_DECL (cfun, i, var)
2019 {
2020 if (!VAR_P (var))
2021 continue;
2022
2023 ret |= maybe_add_sra_candidate (var);
2024 }
2025
2026 return ret;
2027 }
2028
2029 /* Sort all accesses for the given variable, check for partial overlaps and
2030 return NULL if there are any. If there are none, pick a representative for
2031 each combination of offset and size and create a linked list out of them.
2032 Return the pointer to the first representative and make sure it is the first
2033 one in the vector of accesses. */
2034
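/* Illustration, added for exposition (offsets and sizes are in bits and made
   up):  accesses <0, 32>, <0, 32> and <32, 32> yield two representatives,
   one per distinct <offset, size> pair, whereas accesses <0, 64> and
   <32, 64> overlap only partially, so the function gives up and returns
   NULL for the whole variable.  */
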
2035 static struct access *
2036 sort_and_splice_var_accesses (tree var)
2037 {
2038 int i, j, access_count;
2039 struct access *res, **prev_acc_ptr = &res;
2040 vec<access_p> *access_vec;
2041 bool first = true;
2042 HOST_WIDE_INT low = -1, high = 0;
2043
2044 access_vec = get_base_access_vector (var);
2045 if (!access_vec)
2046 return NULL;
2047 access_count = access_vec->length ();
2048
2049 /* Sort by <OFFSET, SIZE>. */
2050 access_vec->qsort (compare_access_positions);
2051
2052 i = 0;
2053 while (i < access_count)
2054 {
2055 struct access *access = (*access_vec)[i];
2056 bool grp_write = access->write;
2057 bool grp_read = !access->write;
2058 bool grp_scalar_write = access->write
2059 && is_gimple_reg_type (access->type);
2060 bool grp_scalar_read = !access->write
2061 && is_gimple_reg_type (access->type);
2062 bool grp_assignment_read = access->grp_assignment_read;
2063 bool grp_assignment_write = access->grp_assignment_write;
2064 bool multiple_scalar_reads = false;
2065 bool total_scalarization = access->grp_total_scalarization;
2066 bool grp_partial_lhs = access->grp_partial_lhs;
2067 bool first_scalar = is_gimple_reg_type (access->type);
2068 bool unscalarizable_region = access->grp_unscalarizable_region;
2069
2070 if (first || access->offset >= high)
2071 {
2072 first = false;
2073 low = access->offset;
2074 high = access->offset + access->size;
2075 }
2076 else if (access->offset > low && access->offset + access->size > high)
2077 return NULL;
2078 else
2079 gcc_assert (access->offset >= low
2080 && access->offset + access->size <= high);
2081
2082 j = i + 1;
2083 while (j < access_count)
2084 {
2085 struct access *ac2 = (*access_vec)[j];
2086 if (ac2->offset != access->offset || ac2->size != access->size)
2087 break;
2088 if (ac2->write)
2089 {
2090 grp_write = true;
2091 grp_scalar_write = (grp_scalar_write
2092 || is_gimple_reg_type (ac2->type));
2093 }
2094 else
2095 {
2096 grp_read = true;
2097 if (is_gimple_reg_type (ac2->type))
2098 {
2099 if (grp_scalar_read)
2100 multiple_scalar_reads = true;
2101 else
2102 grp_scalar_read = true;
2103 }
2104 }
2105 grp_assignment_read |= ac2->grp_assignment_read;
2106 grp_assignment_write |= ac2->grp_assignment_write;
2107 grp_partial_lhs |= ac2->grp_partial_lhs;
2108 unscalarizable_region |= ac2->grp_unscalarizable_region;
2109 total_scalarization |= ac2->grp_total_scalarization;
2110 relink_to_new_repr (access, ac2);
2111
2112 /* If there are both aggregate-type and scalar-type accesses with
2113 this combination of size and offset, the comparison function
2114 should have put the scalars first. */
2115 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2116 ac2->group_representative = access;
2117 j++;
2118 }
2119
2120 i = j;
2121
2122 access->group_representative = access;
2123 access->grp_write = grp_write;
2124 access->grp_read = grp_read;
2125 access->grp_scalar_read = grp_scalar_read;
2126 access->grp_scalar_write = grp_scalar_write;
2127 access->grp_assignment_read = grp_assignment_read;
2128 access->grp_assignment_write = grp_assignment_write;
2129 access->grp_hint = total_scalarization
2130 || (multiple_scalar_reads && !constant_decl_p (var));
2131 access->grp_total_scalarization = total_scalarization;
2132 access->grp_partial_lhs = grp_partial_lhs;
2133 access->grp_unscalarizable_region = unscalarizable_region;
2134 if (access->first_link)
2135 add_access_to_work_queue (access);
2136
2137 *prev_acc_ptr = access;
2138 prev_acc_ptr = &access->next_grp;
2139 }
2140
2141 gcc_assert (res == (*access_vec)[0]);
2142 return res;
2143 }
2144
2145 /* Create a variable for the given ACCESS which determines the type, name and a
2146 few other properties. Return the variable declaration and store it also to
2147 ACCESS->replacement_decl. */
2148
2149 static tree
2150 create_access_replacement (struct access *access)
2151 {
2152 tree repl;
2153
2154 if (access->grp_to_be_debug_replaced)
2155 {
2156 repl = create_tmp_var_raw (access->type);
2157 DECL_CONTEXT (repl) = current_function_decl;
2158 }
2159 else
2160 /* Drop any special alignment on the type if it's not on the main
2161 variant. This avoids issues with weirdo ABIs like AAPCS. */
2162 repl = create_tmp_var (build_qualified_type
2163 (TYPE_MAIN_VARIANT (access->type),
2164 TYPE_QUALS (access->type)), "SR");
2165 if (TREE_CODE (access->type) == COMPLEX_TYPE
2166 || TREE_CODE (access->type) == VECTOR_TYPE)
2167 {
2168 if (!access->grp_partial_lhs)
2169 DECL_GIMPLE_REG_P (repl) = 1;
2170 }
2171 else if (access->grp_partial_lhs
2172 && is_gimple_reg_type (access->type))
2173 TREE_ADDRESSABLE (repl) = 1;
2174
2175 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2176 DECL_ARTIFICIAL (repl) = 1;
2177 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2178
2179 if (DECL_NAME (access->base)
2180 && !DECL_IGNORED_P (access->base)
2181 && !DECL_ARTIFICIAL (access->base))
2182 {
2183 char *pretty_name = make_fancy_name (access->expr);
2184 tree debug_expr = unshare_expr_without_location (access->expr), d;
2185 bool fail = false;
2186
2187 DECL_NAME (repl) = get_identifier (pretty_name);
2188 DECL_NAMELESS (repl) = 1;
2189 obstack_free (&name_obstack, pretty_name);
2190
2191 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2192 as DECL_DEBUG_EXPR isn't considered when looking for still
2193 used SSA_NAMEs and thus they could be freed. All debug info
2194 generation cares is whether something is constant or variable
2195 and that get_ref_base_and_extent works properly on the
2196 expression. It cannot handle accesses at a non-constant offset
2197 though, so just give up in those cases. */
2198 for (d = debug_expr;
2199 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2200 d = TREE_OPERAND (d, 0))
2201 switch (TREE_CODE (d))
2202 {
2203 case ARRAY_REF:
2204 case ARRAY_RANGE_REF:
2205 if (TREE_OPERAND (d, 1)
2206 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2207 fail = true;
2208 if (TREE_OPERAND (d, 3)
2209 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2210 fail = true;
2211 /* FALLTHRU */
2212 case COMPONENT_REF:
2213 if (TREE_OPERAND (d, 2)
2214 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2215 fail = true;
2216 break;
2217 case MEM_REF:
2218 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2219 fail = true;
2220 else
2221 d = TREE_OPERAND (d, 0);
2222 break;
2223 default:
2224 break;
2225 }
2226 if (!fail)
2227 {
2228 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2229 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2230 }
2231 if (access->grp_no_warning)
2232 TREE_NO_WARNING (repl) = 1;
2233 else
2234 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2235 }
2236 else
2237 TREE_NO_WARNING (repl) = 1;
2238
2239 if (dump_file)
2240 {
2241 if (access->grp_to_be_debug_replaced)
2242 {
2243 fprintf (dump_file, "Created a debug-only replacement for ");
2244 print_generic_expr (dump_file, access->base);
2245 fprintf (dump_file, " offset: %u, size: %u\n",
2246 (unsigned) access->offset, (unsigned) access->size);
2247 }
2248 else
2249 {
2250 fprintf (dump_file, "Created a replacement for ");
2251 print_generic_expr (dump_file, access->base);
2252 fprintf (dump_file, " offset: %u, size: %u: ",
2253 (unsigned) access->offset, (unsigned) access->size);
2254 print_generic_expr (dump_file, repl);
2255 fprintf (dump_file, "\n");
2256 }
2257 }
2258 sra_stats.replacements++;
2259
2260 return repl;
2261 }
2262
2263 /* Return ACCESS scalar replacement, which must exist. */
2264
2265 static inline tree
2266 get_access_replacement (struct access *access)
2267 {
2268 gcc_checking_assert (access->replacement_decl);
2269 return access->replacement_decl;
2270 }
2271
2272
2273 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2274 linked list along the way. Stop when *ACCESS is NULL or the access it points
2275 to is not "within" the root. Return false iff some accesses partially
2276 overlap. */
2277
2278 static bool
2279 build_access_subtree (struct access **access)
2280 {
2281 struct access *root = *access, *last_child = NULL;
2282 HOST_WIDE_INT limit = root->offset + root->size;
2283
2284 *access = (*access)->next_grp;
2285 while (*access && (*access)->offset + (*access)->size <= limit)
2286 {
2287 if (!last_child)
2288 root->first_child = *access;
2289 else
2290 last_child->next_sibling = *access;
2291 last_child = *access;
2292 (*access)->parent = root;
2293 (*access)->grp_write |= root->grp_write;
2294
2295 if (!build_access_subtree (access))
2296 return false;
2297 }
2298
2299 if (*access && (*access)->offset < limit)
2300 return false;
2301
2302 return true;
2303 }
2304
2305 /* Build a tree of access representatives, ACCESS is the pointer to the first
2306 one, others are linked in a list by the next_grp field. Return false iff
2307 some accesses partially overlap. */
2308
2309 static bool
2310 build_access_trees (struct access *access)
2311 {
2312 while (access)
2313 {
2314 struct access *root = access;
2315
2316 if (!build_access_subtree (&access))
2317 return false;
2318 root->next_grp = access;
2319 }
2320 return true;
2321 }
2322
2323 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2324 array. */
2325
2326 static bool
2327 expr_with_var_bounded_array_refs_p (tree expr)
2328 {
2329 while (handled_component_p (expr))
2330 {
2331 if (TREE_CODE (expr) == ARRAY_REF
2332 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2333 return true;
2334 expr = TREE_OPERAND (expr, 0);
2335 }
2336 return false;
2337 }
2338
2339 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2340 doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2341 sorts of access flags appropriately along the way, notably propagate
2342 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2343 grp_total_scalarization from PARENT to ROOT.
2344
2345 Creating a replacement for a scalar access is considered beneficial if its
2346 grp_hint is set (this means we are either attempting total scalarization or
2347 there is more than one direct read access) or according to the following
2348 table:
2349
2350 Access written to through a scalar type (once or more times)
2351 |
2352 | Written to in an assignment statement
2353 | |
2354 | | Access read as scalar _once_
2355 | | |
2356 | | | Read in an assignment statement
2357 | | | |
2358 | | | | Scalarize Comment
2359 -----------------------------------------------------------------------------
2360 0 0 0 0 No access for the scalar
2361 0 0 0 1 No access for the scalar
2362 0 0 1 0 No Single read - won't help
2363 0 0 1 1 No The same case
2364 0 1 0 0 No access for the scalar
2365 0 1 0 1 No access for the scalar
2366 0 1 1 0 Yes s = *g; return s.i;
2367 0 1 1 1 Yes The same case as above
2368 1 0 0 0 No Won't help
2369 1 0 0 1 Yes s.i = 1; *g = s;
2370 1 0 1 0 Yes s.i = 5; g = s.i;
2371 1 0 1 1 Yes The same case as above
2372 1 1 0 0 No Won't help.
2373 1 1 0 1 Yes s.i = 1; *g = s;
2374 1 1 1 0 Yes s = *g; return s.i;
2375 1 1 1 1 Yes Any of the above yeses */
2376
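/* For exposition, reading one row of the table above:  the combination
   1 0 0 1 corresponds to code like

     s.i = 1;
     *g = s;

   i.e. the field is written through its scalar type and the whole aggregate
   is then read by an assignment, so creating a replacement for s.i pays off
   ("Yes" in the Scalarize column).  */
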
2377 static bool
2378 analyze_access_subtree (struct access *root, struct access *parent,
2379 bool allow_replacements)
2380 {
2381 struct access *child;
2382 HOST_WIDE_INT limit = root->offset + root->size;
2383 HOST_WIDE_INT covered_to = root->offset;
2384 bool scalar = is_gimple_reg_type (root->type);
2385 bool hole = false, sth_created = false;
2386
2387 if (parent)
2388 {
2389 if (parent->grp_read)
2390 root->grp_read = 1;
2391 if (parent->grp_assignment_read)
2392 root->grp_assignment_read = 1;
2393 if (parent->grp_write)
2394 root->grp_write = 1;
2395 if (parent->grp_assignment_write)
2396 root->grp_assignment_write = 1;
2397 if (parent->grp_total_scalarization)
2398 root->grp_total_scalarization = 1;
2399 }
2400
2401 if (root->grp_unscalarizable_region)
2402 allow_replacements = false;
2403
2404 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2405 allow_replacements = false;
2406
2407 for (child = root->first_child; child; child = child->next_sibling)
2408 {
2409 hole |= covered_to < child->offset;
2410 sth_created |= analyze_access_subtree (child, root,
2411 allow_replacements && !scalar);
2412
2413 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2414 root->grp_total_scalarization &= child->grp_total_scalarization;
2415 if (child->grp_covered)
2416 covered_to += child->size;
2417 else
2418 hole = true;
2419 }
2420
2421 if (allow_replacements && scalar && !root->first_child
2422 && (root->grp_hint
2423 || ((root->grp_scalar_read || root->grp_assignment_read)
2424 && (root->grp_scalar_write || root->grp_assignment_write))))
2425 {
2426 /* Always create access replacements that cover the whole access.
2427 For integral types this means the precision has to match.
2428 Avoid assumptions based on the integral type kind, too. */
2429 if (INTEGRAL_TYPE_P (root->type)
2430 && (TREE_CODE (root->type) != INTEGER_TYPE
2431 || TYPE_PRECISION (root->type) != root->size)
2432 /* But leave bitfield accesses alone. */
2433 && (TREE_CODE (root->expr) != COMPONENT_REF
2434 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2435 {
2436 tree rt = root->type;
2437 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2438 && (root->size % BITS_PER_UNIT) == 0);
2439 root->type = build_nonstandard_integer_type (root->size,
2440 TYPE_UNSIGNED (rt));
2441 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2442 root->offset, root->reverse,
2443 root->type, NULL, false);
2444
2445 if (dump_file && (dump_flags & TDF_DETAILS))
2446 {
2447 fprintf (dump_file, "Changing the type of a replacement for ");
2448 print_generic_expr (dump_file, root->base);
2449 fprintf (dump_file, " offset: %u, size: %u ",
2450 (unsigned) root->offset, (unsigned) root->size);
2451 fprintf (dump_file, " to an integer.\n");
2452 }
2453 }
2454
2455 root->grp_to_be_replaced = 1;
2456 root->replacement_decl = create_access_replacement (root);
2457 sth_created = true;
2458 hole = false;
2459 }
2460 else
2461 {
2462 if (allow_replacements
2463 && scalar && !root->first_child
2464 && (root->grp_scalar_write || root->grp_assignment_write)
2465 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2466 DECL_UID (root->base)))
2467 {
2468 gcc_checking_assert (!root->grp_scalar_read
2469 && !root->grp_assignment_read);
2470 sth_created = true;
2471 if (MAY_HAVE_DEBUG_STMTS)
2472 {
2473 root->grp_to_be_debug_replaced = 1;
2474 root->replacement_decl = create_access_replacement (root);
2475 }
2476 }
2477
2478 if (covered_to < limit)
2479 hole = true;
2480 if (scalar || !allow_replacements)
2481 root->grp_total_scalarization = 0;
2482 }
2483
2484 if (!hole || root->grp_total_scalarization)
2485 root->grp_covered = 1;
2486 else if (root->grp_write || comes_initialized_p (root->base))
2487 root->grp_unscalarized_data = 1; /* not covered and written to */
2488 return sth_created;
2489 }
2490
2491 /* Analyze all access trees linked by next_grp by means of
2492 analyze_access_subtree. */
2493 static bool
2494 analyze_access_trees (struct access *access)
2495 {
2496 bool ret = false;
2497
2498 while (access)
2499 {
2500 if (analyze_access_subtree (access, NULL, true))
2501 ret = true;
2502 access = access->next_grp;
2503 }
2504
2505 return ret;
2506 }
2507
2508 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2509 SIZE would conflict with an already existing one. If exactly such a child
2510 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2511
2512 static bool
2513 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2514 HOST_WIDE_INT size, struct access **exact_match)
2515 {
2516 struct access *child;
2517
2518 for (child = lacc->first_child; child; child = child->next_sibling)
2519 {
2520 if (child->offset == norm_offset && child->size == size)
2521 {
2522 *exact_match = child;
2523 return true;
2524 }
2525
2526 if (child->offset < norm_offset + size
2527 && child->offset + child->size > norm_offset)
2528 return true;
2529 }
2530
2531 return false;
2532 }
2533
2534 /* Create a new child access of PARENT, with all properties just like MODEL
2535 except for its offset, and with its grp_read flag set to false.
2536 Return the new access or NULL if it cannot be created. Note that this
2537 access is created long after all splicing and sorting, it's not located in
2538 any access vector and is automatically a representative of its group. Set
2539 the grp_write flag of the new access if SET_GRP_WRITE is true. */
2540
2541 static struct access *
2542 create_artificial_child_access (struct access *parent, struct access *model,
2543 HOST_WIDE_INT new_offset,
2544 bool set_grp_write)
2545 {
2546 struct access **child;
2547 tree expr = parent->base;
2548
2549 gcc_assert (!model->grp_unscalarizable_region);
2550
2551 struct access *access = access_pool.allocate ();
2552 memset (access, 0, sizeof (struct access));
2553 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2554 model->type))
2555 {
2556 access->grp_no_warning = true;
2557 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2558 new_offset, model, NULL, false);
2559 }
2560
2561 access->base = parent->base;
2562 access->expr = expr;
2563 access->offset = new_offset;
2564 access->size = model->size;
2565 access->type = model->type;
2566 access->grp_write = set_grp_write;
2567 access->grp_read = false;
2568 access->reverse = model->reverse;
2569
2570 child = &parent->first_child;
2571 while (*child && (*child)->offset < new_offset)
2572 child = &(*child)->next_sibling;
2573
2574 access->next_sibling = *child;
2575 *child = access;
2576
2577 return access;
2578 }
2579
2580
2581 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2582 true if any new subaccess was created. Additionally, if RACC is a scalar
2583 access but LACC is not, change the type of the latter, if possible. */
2584
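/* Illustration, added for exposition:  for an aggregate assignment l = r
   where the right hand side has a child access covering r.f but the left
   hand side has none at the corresponding offset, an artificial child
   access for l at that offset is created (see
   create_artificial_child_access above) so that the copy can later be
   performed per replacement rather than through memory.  */
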
2585 static bool
2586 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2587 {
2588 struct access *rchild;
2589 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2590 bool ret = false;
2591
2592 /* If the LHS is still not marked as being written to, we only need to do so
2593 if the RHS at this level actually was. */
2594 if (!lacc->grp_write)
2595 {
2596 gcc_checking_assert (!comes_initialized_p (racc->base));
2597 if (racc->grp_write)
2598 {
2599 lacc->grp_write = true;
2600 ret = true;
2601 }
2602 }
2603
2604 if (is_gimple_reg_type (lacc->type)
2605 || lacc->grp_unscalarizable_region
2606 || racc->grp_unscalarizable_region)
2607 {
2608 ret |= !lacc->grp_write;
2609 lacc->grp_write = true;
2610 return ret;
2611 }
2612
2613 if (is_gimple_reg_type (racc->type))
2614 {
2615 if (!lacc->first_child && !racc->first_child)
2616 {
2617 tree t = lacc->base;
2618
2619 lacc->type = racc->type;
2620 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2621 lacc->offset, racc->type))
2622 lacc->expr = t;
2623 else
2624 {
2625 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2626 lacc->base, lacc->offset,
2627 racc, NULL, false);
2628 lacc->grp_no_warning = true;
2629 }
2630 }
2631 return ret;
2632 }
2633
2634 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2635 {
2636 struct access *new_acc = NULL;
2637 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2638
2639 if (rchild->grp_unscalarizable_region)
2640 {
2641 lacc->grp_write = true;
2642 continue;
2643 }
2644
2645 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2646 &new_acc))
2647 {
2648 if (new_acc)
2649 {
2650 if (!new_acc->grp_write
2651 && (lacc->grp_write || rchild->grp_write))
2652 {
2653 new_acc->grp_write = true;
2654 ret = true;
2655 }
2656
2657 rchild->grp_hint = 1;
2658 new_acc->grp_hint |= new_acc->grp_read;
2659 if (rchild->first_child)
2660 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2661 }
2662 else
2663 lacc->grp_write = true;
2664 continue;
2665 }
2666
2667 rchild->grp_hint = 1;
2668 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2669 lacc->grp_write
2670 || rchild->grp_write);
2671 if (new_acc)
2672 {
2673 ret = true;
2674 if (racc->first_child)
2675 propagate_subaccesses_across_link (new_acc, rchild);
2676 }
2677 }
2678
2679 return ret;
2680 }
2681
2682 /* Propagate all subaccesses across assignment links. */
2683
2684 static void
2685 propagate_all_subaccesses (void)
2686 {
2687 while (work_queue_head)
2688 {
2689 struct access *racc = pop_access_from_work_queue ();
2690 struct assign_link *link;
2691
2692 gcc_assert (racc->first_link);
2693
2694 for (link = racc->first_link; link; link = link->next)
2695 {
2696 struct access *lacc = link->lacc;
2697
2698 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2699 continue;
2700 lacc = lacc->group_representative;
2701 if (propagate_subaccesses_across_link (lacc, racc))
2702 do
2703 {
2704 if (lacc->first_link)
2705 {
2706 add_access_to_work_queue (lacc);
2707 break;
2708 }
2709 lacc = lacc->parent;
2710 }
2711 while (lacc);
2712 }
2713 }
2714 }
2715
2716 /* Go through all accesses collected throughout the (intraprocedural) analysis
2717 stage, exclude overlapping ones, identify representatives and build trees
2718 out of them, making decisions about scalarization on the way. Return true
2719 iff there are any to-be-scalarized variables after this stage. */
2720
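/* A note on the total-scalarization size limit used below, added for
   exposition:  unless the user overrides it with something like

     gcc -O2 --param sra-max-scalarization-size-Ospeed=32

   (value in bytes; the exact spelling of the parameter may differ between
   releases), the limit defaults to get_move_ratio (speed_p) * UNITS_PER_WORD
   bytes and is converted to bits before being compared with TYPE_SIZE.  */
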
2721 static bool
2722 analyze_all_variable_accesses (void)
2723 {
2724 int res = 0;
2725 bitmap tmp = BITMAP_ALLOC (NULL);
2726 bitmap_iterator bi;
2727 unsigned i;
2728 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2729
2730 enum compiler_param param = optimize_speed_p
2731 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2732 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2733
2734 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2735 fall back to a target default. */
2736 unsigned HOST_WIDE_INT max_scalarization_size
2737 = global_options_set.x_param_values[param]
2738 ? PARAM_VALUE (param)
2739 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2740
2741 max_scalarization_size *= BITS_PER_UNIT;
2742
2743 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2744 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2745 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2746 {
2747 tree var = candidate (i);
2748
2749 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2750 constant_decl_p (var)))
2751 {
2752 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2753 <= max_scalarization_size)
2754 {
2755 create_total_scalarization_access (var);
2756 completely_scalarize (var, TREE_TYPE (var), 0, var);
2757 statistics_counter_event (cfun,
2758 "Totally-scalarized aggregates", 1);
2759 if (dump_file && (dump_flags & TDF_DETAILS))
2760 {
2761 fprintf (dump_file, "Will attempt to totally scalarize ");
2762 print_generic_expr (dump_file, var);
2763 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2764 }
2765 }
2766 else if (dump_file && (dump_flags & TDF_DETAILS))
2767 {
2768 fprintf (dump_file, "Too big to totally scalarize: ");
2769 print_generic_expr (dump_file, var);
2770 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2771 }
2772 }
2773 }
2774
2775 bitmap_copy (tmp, candidate_bitmap);
2776 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2777 {
2778 tree var = candidate (i);
2779 struct access *access;
2780
2781 access = sort_and_splice_var_accesses (var);
2782 if (!access || !build_access_trees (access))
2783 disqualify_candidate (var,
2784 "No or inhibitingly overlapping accesses.");
2785 }
2786
2787 propagate_all_subaccesses ();
2788
2789 bitmap_copy (tmp, candidate_bitmap);
2790 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2791 {
2792 tree var = candidate (i);
2793 struct access *access = get_first_repr_for_decl (var);
2794
2795 if (analyze_access_trees (access))
2796 {
2797 res++;
2798 if (dump_file && (dump_flags & TDF_DETAILS))
2799 {
2800 fprintf (dump_file, "\nAccess trees for ");
2801 print_generic_expr (dump_file, var);
2802 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2803 dump_access_tree (dump_file, access);
2804 fprintf (dump_file, "\n");
2805 }
2806 }
2807 else
2808 disqualify_candidate (var, "No scalar replacements to be created.");
2809 }
2810
2811 BITMAP_FREE (tmp);
2812
2813 if (res)
2814 {
2815 statistics_counter_event (cfun, "Scalarized aggregates", res);
2816 return true;
2817 }
2818 else
2819 return false;
2820 }
2821
2822 /* Generate statements copying scalar replacements of accesses within a subtree
2823 into or out of AGG. ACCESS, all its children, siblings and their children
2824 are to be processed. AGG is an aggregate type expression (can be a
2825 declaration but does not have to be, it can for example also be a mem_ref or
2826 a series of handled components). TOP_OFFSET is the offset of the processed
2827 subtree which has to be subtracted from offsets of individual accesses to
2828 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2829 replacements in the interval <start_offset, start_offset + chunk_size>,
2830 otherwise copy all. GSI is a statement iterator used to place the new
2831 statements. WRITE should be true when the statements should write from AGG
2832 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2833 statements will be added after the current statement in GSI, they will be
2834 added before the statement otherwise. */
2835
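/* Illustration, added for exposition (the names are made up):  if AGG has
   replacements SR.1 and SR.2 for its fields f1 and f2, a call with WRITE
   being true in effect performs

     SR.1 = AGG.f1;
     SR.2 = AGG.f2;

   (the actual references are built by build_ref_for_model), and a call with
   WRITE being false emits the reverse assignments, storing the replacements
   back into AGG.  */
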
2836 static void
2837 generate_subtree_copies (struct access *access, tree agg,
2838 HOST_WIDE_INT top_offset,
2839 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2840 gimple_stmt_iterator *gsi, bool write,
2841 bool insert_after, location_t loc)
2842 {
2843 /* Never write anything into constant pool decls. See PR70602. */
2844 if (!write && constant_decl_p (agg))
2845 return;
2846 do
2847 {
2848 if (chunk_size && access->offset >= start_offset + chunk_size)
2849 return;
2850
2851 if (access->grp_to_be_replaced
2852 && (chunk_size == 0
2853 || access->offset + access->size > start_offset))
2854 {
2855 tree expr, repl = get_access_replacement (access);
2856 gassign *stmt;
2857
2858 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2859 access, gsi, insert_after);
2860
2861 if (write)
2862 {
2863 if (access->grp_partial_lhs)
2864 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2865 !insert_after,
2866 insert_after ? GSI_NEW_STMT
2867 : GSI_SAME_STMT);
2868 stmt = gimple_build_assign (repl, expr);
2869 }
2870 else
2871 {
2872 TREE_NO_WARNING (repl) = 1;
2873 if (access->grp_partial_lhs)
2874 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2875 !insert_after,
2876 insert_after ? GSI_NEW_STMT
2877 : GSI_SAME_STMT);
2878 stmt = gimple_build_assign (expr, repl);
2879 }
2880 gimple_set_location (stmt, loc);
2881
2882 if (insert_after)
2883 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2884 else
2885 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2886 update_stmt (stmt);
2887 sra_stats.subtree_copies++;
2888 }
2889 else if (write
2890 && access->grp_to_be_debug_replaced
2891 && (chunk_size == 0
2892 || access->offset + access->size > start_offset))
2893 {
2894 gdebug *ds;
2895 tree drhs = build_debug_ref_for_model (loc, agg,
2896 access->offset - top_offset,
2897 access);
2898 ds = gimple_build_debug_bind (get_access_replacement (access),
2899 drhs, gsi_stmt (*gsi));
2900 if (insert_after)
2901 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2902 else
2903 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2904 }
2905
2906 if (access->first_child)
2907 generate_subtree_copies (access->first_child, agg, top_offset,
2908 start_offset, chunk_size, gsi,
2909 write, insert_after, loc);
2910
2911 access = access->next_sibling;
2912 }
2913 while (access);
2914 }
2915
2916 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2917 root of the subtree to be processed. GSI is the statement iterator used
2918 for inserting statements which are added after the current statement if
2919 INSERT_AFTER is true or before it otherwise. */
2920
2921 static void
2922 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2923 bool insert_after, location_t loc)
2924
2925 {
2926 struct access *child;
2927
2928 if (access->grp_to_be_replaced)
2929 {
2930 gassign *stmt;
2931
2932 stmt = gimple_build_assign (get_access_replacement (access),
2933 build_zero_cst (access->type));
2934 if (insert_after)
2935 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2936 else
2937 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2938 update_stmt (stmt);
2939 gimple_set_location (stmt, loc);
2940 }
2941 else if (access->grp_to_be_debug_replaced)
2942 {
2943 gdebug *ds
2944 = gimple_build_debug_bind (get_access_replacement (access),
2945 build_zero_cst (access->type),
2946 gsi_stmt (*gsi));
2947 if (insert_after)
2948 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2949 else
2950 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2951 }
2952
2953 for (child = access->first_child; child; child = child->next_sibling)
2954 init_subtree_with_zero (child, gsi, insert_after, loc);
2955 }
2956
2957 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2958 root of the subtree to be processed. GSI is the statement iterator used
2959 for inserting statements which are added after the current statement if
2960 INSERT_AFTER is true or before it otherwise. */
2961
2962 static void
2963 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2964 bool insert_after, location_t loc)
2965
2966 {
2967 struct access *child;
2968
2969 if (access->grp_to_be_replaced)
2970 {
2971 tree rep = get_access_replacement (access);
2972 tree clobber = build_constructor (access->type, NULL);
2973 TREE_THIS_VOLATILE (clobber) = 1;
2974 gimple *stmt = gimple_build_assign (rep, clobber);
2975
2976 if (insert_after)
2977 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2978 else
2979 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2980 update_stmt (stmt);
2981 gimple_set_location (stmt, loc);
2982 }
2983
2984 for (child = access->first_child; child; child = child->next_sibling)
2985 clobber_subtree (child, gsi, insert_after, loc);
2986 }
2987
2988 /* Search for an access representative for the given expression EXPR and
2989 return it or NULL if it cannot be found. */
2990
2991 static struct access *
2992 get_access_for_expr (tree expr)
2993 {
2994 HOST_WIDE_INT offset, size, max_size;
2995 tree base;
2996 bool reverse;
2997
2998 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2999 a different size than the size of its argument and we need the latter
3000 one. */
3001 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3002 expr = TREE_OPERAND (expr, 0);
3003
3004 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
3005 if (max_size == -1 || !DECL_P (base))
3006 return NULL;
3007
3008 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3009 return NULL;
3010
3011 return get_var_base_offset_size_access (base, offset, max_size);
3012 }
3013
3014 /* Replace the expression EXPR with a scalar replacement if there is one and
3015 generate other statements to do type conversion or subtree copying if
3016 necessary. GSI is used to place newly created statements, WRITE is true if
3017 the expression is being written to (it is on a LHS of a statement or output
3018 in an assembly statement). */
3019
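/* Illustration, added for exposition (the names are made up):  a statement
   such as

     x_1 = s.f;

   has s.f rewritten to its replacement variable (named along the lines of
   s$f by make_fancy_name) when one exists and the types match; when they do
   not, the value is instead moved through a reference built by
   build_ref_for_model so that the conversion is made explicit.  */
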
3020 static bool
3021 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3022 {
3023 location_t loc;
3024 struct access *access;
3025 tree type, bfr, orig_expr;
3026
3027 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3028 {
3029 bfr = *expr;
3030 expr = &TREE_OPERAND (*expr, 0);
3031 }
3032 else
3033 bfr = NULL_TREE;
3034
3035 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3036 expr = &TREE_OPERAND (*expr, 0);
3037 access = get_access_for_expr (*expr);
3038 if (!access)
3039 return false;
3040 type = TREE_TYPE (*expr);
3041 orig_expr = *expr;
3042
3043 loc = gimple_location (gsi_stmt (*gsi));
3044 gimple_stmt_iterator alt_gsi = gsi_none ();
3045 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3046 {
3047 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3048 gsi = &alt_gsi;
3049 }
3050
3051 if (access->grp_to_be_replaced)
3052 {
3053 tree repl = get_access_replacement (access);
3054 /* If we replace a non-register typed access simply use the original
3055 access expression to extract the scalar component afterwards.
3056 This happens if scalarizing a function return value or parameter
3057 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3058 gcc.c-torture/compile/20011217-1.c.
3059
3060 We also want to use this when accessing a complex or vector which can
3061 be accessed as a different type too, potentially creating a need for
3062 type conversion (see PR42196) and when scalarized unions are involved
3063 in assembler statements (see PR42398). */
3064 if (!useless_type_conversion_p (type, access->type))
3065 {
3066 tree ref;
3067
3068 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3069
3070 if (write)
3071 {
3072 gassign *stmt;
3073
3074 if (access->grp_partial_lhs)
3075 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3076 false, GSI_NEW_STMT);
3077 stmt = gimple_build_assign (repl, ref);
3078 gimple_set_location (stmt, loc);
3079 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3080 }
3081 else
3082 {
3083 gassign *stmt;
3084
3085 if (access->grp_partial_lhs)
3086 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3087 true, GSI_SAME_STMT);
3088 stmt = gimple_build_assign (ref, repl);
3089 gimple_set_location (stmt, loc);
3090 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3091 }
3092 }
3093 else
3094 *expr = repl;
3095 sra_stats.exprs++;
3096 }
3097 else if (write && access->grp_to_be_debug_replaced)
3098 {
3099 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3100 NULL_TREE,
3101 gsi_stmt (*gsi));
3102 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3103 }
3104
3105 if (access->first_child)
3106 {
3107 HOST_WIDE_INT start_offset, chunk_size;
3108 if (bfr
3109 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3110 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3111 {
3112 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3113 start_offset = access->offset
3114 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3115 }
3116 else
3117 start_offset = chunk_size = 0;
3118
3119 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3120 start_offset, chunk_size, gsi, write, write,
3121 loc);
3122 }
3123 return true;
3124 }
3125
3126 /* Where scalar replacements of the RHS have been written to when a replacement
3127 of the LHS of an assignment cannot be directly loaded from a replacement of
3128 the RHS. */
3129 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3130 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3131 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3132
3133 struct subreplacement_assignment_data
3134 {
3135 /* Offset of the access representing the lhs of the assignment. */
3136 HOST_WIDE_INT left_offset;
3137
3138 /* LHS and RHS of the original assignment. */
3139 tree assignment_lhs, assignment_rhs;
3140
3141 /* Access representing the rhs of the whole assignment. */
3142 struct access *top_racc;
3143
3144 /* Stmt iterator used for statement insertions after the original assignment.
3145 It points to the main GSI used to traverse a BB during function body
3146 modification. */
3147 gimple_stmt_iterator *new_gsi;
3148
3149 /* Stmt iterator used for statement insertions before the original
3150 assignment. Keeps on pointing to the original statement. */
3151 gimple_stmt_iterator old_gsi;
3152
3153 /* Location of the assignment. */
3154 location_t loc;
3155
3156 /* Keeps the information whether we have needed to refresh replacements of
3157 the LHS and from which side of the assignments this takes place. */
3158 enum unscalarized_data_handling refreshed;
3159 };
3160
3161 /* Store all replacements in the access tree rooted in TOP_RACC either to their
3162 base aggregate if there are unscalarized data or directly to LHS of the
3163 statement that is pointed to by GSI otherwise. */
3164
3165 static void
3166 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3167 {
3168 tree src;
3169 if (sad->top_racc->grp_unscalarized_data)
3170 {
3171 src = sad->assignment_rhs;
3172 sad->refreshed = SRA_UDH_RIGHT;
3173 }
3174 else
3175 {
3176 src = sad->assignment_lhs;
3177 sad->refreshed = SRA_UDH_LEFT;
3178 }
3179 generate_subtree_copies (sad->top_racc->first_child, src,
3180 sad->top_racc->offset, 0, 0,
3181 &sad->old_gsi, false, false, sad->loc);
3182 }
3183
3184 /* Try to generate statements to load all sub-replacements in an access subtree
3185 formed by children of LACC from scalar replacements in the SAD->top_racc
3186 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3187 and load the accesses from it. */
3188
3189 static void
3190 load_assign_lhs_subreplacements (struct access *lacc,
3191 struct subreplacement_assignment_data *sad)
3192 {
3193 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3194 {
3195 HOST_WIDE_INT offset;
3196 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3197
3198 if (lacc->grp_to_be_replaced)
3199 {
3200 struct access *racc;
3201 gassign *stmt;
3202 tree rhs;
3203
3204 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3205 if (racc && racc->grp_to_be_replaced)
3206 {
3207 rhs = get_access_replacement (racc);
3208 if (!useless_type_conversion_p (lacc->type, racc->type))
3209 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3210 lacc->type, rhs);
3211
3212 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3213 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3214 NULL_TREE, true, GSI_SAME_STMT);
3215 }
3216 else
3217 {
3218 /* No suitable access on the right hand side, need to load from
3219 the aggregate. See if we have to update it first... */
3220 if (sad->refreshed == SRA_UDH_NONE)
3221 handle_unscalarized_data_in_subtree (sad);
3222
3223 if (sad->refreshed == SRA_UDH_LEFT)
3224 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3225 lacc->offset - sad->left_offset,
3226 lacc, sad->new_gsi, true);
3227 else
3228 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3229 lacc->offset - sad->left_offset,
3230 lacc, sad->new_gsi, true);
3231 if (lacc->grp_partial_lhs)
3232 rhs = force_gimple_operand_gsi (sad->new_gsi,
3233 rhs, true, NULL_TREE,
3234 false, GSI_NEW_STMT);
3235 }
3236
3237 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3238 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3239 gimple_set_location (stmt, sad->loc);
3240 update_stmt (stmt);
3241 sra_stats.subreplacements++;
3242 }
3243 else
3244 {
3245 if (sad->refreshed == SRA_UDH_NONE
3246 && lacc->grp_read && !lacc->grp_covered)
3247 handle_unscalarized_data_in_subtree (sad);
3248
3249 if (lacc && lacc->grp_to_be_debug_replaced)
3250 {
3251 gdebug *ds;
3252 tree drhs;
3253 struct access *racc = find_access_in_subtree (sad->top_racc,
3254 offset,
3255 lacc->size);
3256
3257 if (racc && racc->grp_to_be_replaced)
3258 {
3259 if (racc->grp_write || constant_decl_p (racc->base))
3260 drhs = get_access_replacement (racc);
3261 else
3262 drhs = NULL;
3263 }
3264 else if (sad->refreshed == SRA_UDH_LEFT)
3265 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3266 lacc->offset, lacc);
3267 else if (sad->refreshed == SRA_UDH_RIGHT)
3268 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3269 offset, lacc);
3270 else
3271 drhs = NULL_TREE;
3272 if (drhs
3273 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3274 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3275 lacc->type, drhs);
3276 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3277 drhs, gsi_stmt (sad->old_gsi));
3278 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3279 }
3280 }
3281
3282 if (lacc->first_child)
3283 load_assign_lhs_subreplacements (lacc, sad);
3284 }
3285 }
3286
3287 /* Result code for SRA assignment modification. */
3288 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3289 SRA_AM_MODIFIED, /* stmt changed but not
3290 removed */
3291 SRA_AM_REMOVED }; /* stmt eliminated */
3292
3293 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3294 to the assignment and GSI is the statement iterator pointing at it. Returns
3295 the same values as sra_modify_assign. */
3296
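/* Illustration, added for exposition:  for an empty-CONSTRUCTOR assignment
   such as s = {}, every scalar replacement in the access tree of s is
   assigned zero via init_subtree_with_zero; if the replacements cover the
   whole of s, the original statement becomes dead and is removed.  Clobbers
   are handled analogously via clobber_subtree.  */
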
3297 static enum assignment_mod_result
3298 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3299 {
3300 tree lhs = gimple_assign_lhs (stmt);
3301 struct access *acc = get_access_for_expr (lhs);
3302 if (!acc)
3303 return SRA_AM_NONE;
3304 location_t loc = gimple_location (stmt);
3305
3306 if (gimple_clobber_p (stmt))
3307 {
3308 /* Clobber the replacement variable. */
3309 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3310 /* Remove clobbers of fully scalarized variables, they are dead. */
3311 if (acc->grp_covered)
3312 {
3313 unlink_stmt_vdef (stmt);
3314 gsi_remove (gsi, true);
3315 release_defs (stmt);
3316 return SRA_AM_REMOVED;
3317 }
3318 else
3319 return SRA_AM_MODIFIED;
3320 }
3321
3322 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3323 {
3324 /* I have never seen this code path trigger but if it can happen the
3325 following should handle it gracefully. */
3326 if (access_has_children_p (acc))
3327 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3328 true, true, loc);
3329 return SRA_AM_MODIFIED;
3330 }
3331
3332 if (acc->grp_covered)
3333 {
3334 init_subtree_with_zero (acc, gsi, false, loc);
3335 unlink_stmt_vdef (stmt);
3336 gsi_remove (gsi, true);
3337 release_defs (stmt);
3338 return SRA_AM_REMOVED;
3339 }
3340 else
3341 {
3342 init_subtree_with_zero (acc, gsi, true, loc);
3343 return SRA_AM_MODIFIED;
3344 }
3345 }
3346
3347 /* Create and return a new suitable default definition SSA_NAME for RACC which
3348 is an access describing an uninitialized part of an aggregate that is being
3349 loaded. */
3350
3351 static tree
3352 get_repl_default_def_ssa_name (struct access *racc)
3353 {
3354 gcc_checking_assert (!racc->grp_to_be_replaced
3355 && !racc->grp_to_be_debug_replaced);
3356 if (!racc->replacement_decl)
3357 racc->replacement_decl = create_access_replacement (racc);
3358 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3359 }
3360
3361 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3362 bit-field field declaration somewhere in it. */
3363
3364 static inline bool
3365 contains_vce_or_bfcref_p (const_tree ref)
3366 {
3367 while (handled_component_p (ref))
3368 {
3369 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3370 || (TREE_CODE (ref) == COMPONENT_REF
3371 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3372 return true;
3373 ref = TREE_OPERAND (ref, 0);
3374 }
3375
3376 return false;
3377 }
3378
3379 /* Examine both sides of the assignment statement pointed to by STMT, replace
3380 them with a scalar replacement if there is one and generate copying of
3381 replacements if scalarized aggregates have been used in the assignment. GSI
3382 is used to hold generated statements for type conversions and subtree
3383 copying. */
3384
3385 static enum assignment_mod_result
3386 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3387 {
3388 struct access *lacc, *racc;
3389 tree lhs, rhs;
3390 bool modify_this_stmt = false;
3391 bool force_gimple_rhs = false;
3392 location_t loc;
3393 gimple_stmt_iterator orig_gsi = *gsi;
3394
3395 if (!gimple_assign_single_p (stmt))
3396 return SRA_AM_NONE;
3397 lhs = gimple_assign_lhs (stmt);
3398 rhs = gimple_assign_rhs1 (stmt);
3399
3400 if (TREE_CODE (rhs) == CONSTRUCTOR)
3401 return sra_modify_constructor_assign (stmt, gsi);
3402
3403 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3404 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3405 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3406 {
3407 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3408 gsi, false);
3409 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3410 gsi, true);
3411 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3412 }
3413
3414 lacc = get_access_for_expr (lhs);
3415 racc = get_access_for_expr (rhs);
3416 if (!lacc && !racc)
3417 return SRA_AM_NONE;
3418 /* Avoid modifying initializations of constant-pool replacements. */
3419 if (racc && (racc->replacement_decl == lhs))
3420 return SRA_AM_NONE;
3421
3422 loc = gimple_location (stmt);
3423 if (lacc && lacc->grp_to_be_replaced)
3424 {
3425 lhs = get_access_replacement (lacc);
3426 gimple_assign_set_lhs (stmt, lhs);
3427 modify_this_stmt = true;
3428 if (lacc->grp_partial_lhs)
3429 force_gimple_rhs = true;
3430 sra_stats.exprs++;
3431 }
3432
3433 if (racc && racc->grp_to_be_replaced)
3434 {
3435 rhs = get_access_replacement (racc);
3436 modify_this_stmt = true;
3437 if (racc->grp_partial_lhs)
3438 force_gimple_rhs = true;
3439 sra_stats.exprs++;
3440 }
3441 else if (racc
3442 && !racc->grp_unscalarized_data
3443 && !racc->grp_unscalarizable_region
3444 && TREE_CODE (lhs) == SSA_NAME
3445 && !access_has_replacements_p (racc))
3446 {
3447 rhs = get_repl_default_def_ssa_name (racc);
3448 modify_this_stmt = true;
3449 sra_stats.exprs++;
3450 }
3451
3452 if (modify_this_stmt)
3453 {
3454 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3455 {
3456 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3457 ??? This should move to fold_stmt which we simply should
3458 call after building a VIEW_CONVERT_EXPR here. */
3459 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3460 && !contains_bitfld_component_ref_p (lhs))
3461 {
3462 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3463 gimple_assign_set_lhs (stmt, lhs);
3464 }
3465 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3466 && !contains_vce_or_bfcref_p (rhs))
3467 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3468
3469 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3470 {
3471 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3472 rhs);
3473 if (is_gimple_reg_type (TREE_TYPE (lhs))
3474 && TREE_CODE (lhs) != SSA_NAME)
3475 force_gimple_rhs = true;
3476 }
3477 }
3478 }
3479
3480 if (lacc && lacc->grp_to_be_debug_replaced)
3481 {
3482 tree dlhs = get_access_replacement (lacc);
3483 tree drhs = unshare_expr (rhs);
3484 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3485 {
3486 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3487 && !contains_vce_or_bfcref_p (drhs))
3488 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3489 if (drhs
3490 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3491 TREE_TYPE (drhs)))
3492 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3493 TREE_TYPE (dlhs), drhs);
3494 }
3495 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3496 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3497 }
3498
3499 /* From this point on, the function deals with assignments in between
3500 aggregates when at least one has scalar reductions of some of its
3501 components. There are three possible scenarios: 1) both the LHS and RHS have
3502 to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
3503
3504 In the first case, we would like to load the LHS components from RHS
3505 components whenever possible. If that is not possible, we would like to
3506 read it directly from the RHS (after updating it by storing in it its own
3507 components). If there are some necessary unscalarized data in the LHS,
3508 those will be loaded by the original assignment too. If neither of these
3509 cases happen, the original statement can be removed. Most of this is done
3510 by load_assign_lhs_subreplacements.
3511
3512 In the second case, we would like to store all RHS scalarized components
3513 directly into LHS and if they cover the aggregate completely, remove the
3514 statement too. In the third case, we want the LHS components to be loaded
3515 directly from the RHS (DSE will remove the original statement if it
3516 becomes redundant).
3517
3518 This is a bit complex but manageable when types match and when unions do
3519 not cause confusion in a way that we cannot really load a component of LHS
3520 from the RHS or vice versa (the access representing this level can have
3521 subaccesses that are accessible only through a different union field at a
3522 higher level - different from the one used in the examined expression).
3523 Unions are fun.
3524
3525 Therefore, I specially handle a fourth case, happening when there is a
3526 specific type cast or it is impossible to locate a scalarized subaccess on
3527 the other side of the expression. If that happens, I simply "refresh" the
3528 RHS by storing in it its scalarized components, leave the original statement
3529 there to do the copying and then load the scalar replacements of the LHS.
3530 This is what the first branch does. */
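
  /* For exposition (the names below are made up):  the "second case" above
     covers e.g. an assignment *p_1 = s in which s is completely scalarized
     into SR.1 and SR.2 while *p_1 is not a candidate; the statement is then
     replaced with direct stores of the replacements into the destination,
     along the lines of

       MEM[(struct S *)p_1] = SR.1;
       MEM[(struct S *)p_1 + 4B] = SR.2;

     and removed.  */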
3531
3532 if (modify_this_stmt
3533 || gimple_has_volatile_ops (stmt)
3534 || contains_vce_or_bfcref_p (rhs)
3535 || contains_vce_or_bfcref_p (lhs)
3536 || stmt_ends_bb_p (stmt))
3537 {
3538 /* No need to copy into a constant-pool, it comes pre-initialized. */
3539 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3540 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3541 gsi, false, false, loc);
3542 if (access_has_children_p (lacc))
3543 {
3544 gimple_stmt_iterator alt_gsi = gsi_none ();
3545 if (stmt_ends_bb_p (stmt))
3546 {
3547 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3548 gsi = &alt_gsi;
3549 }
3550 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3551 gsi, true, true, loc);
3552 }
3553 sra_stats.separate_lhs_rhs_handling++;
3554
3555 /* This gimplification must be done after generate_subtree_copies,
3556 lest we insert the subtree copies in the middle of the gimplified
3557 sequence. */
3558 if (force_gimple_rhs)
3559 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3560 true, GSI_SAME_STMT);
3561 if (gimple_assign_rhs1 (stmt) != rhs)
3562 {
3563 modify_this_stmt = true;
3564 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3565 gcc_assert (stmt == gsi_stmt (orig_gsi));
3566 }
3567
3568 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3569 }
3570 else
3571 {
3572 if (access_has_children_p (lacc)
3573 && access_has_children_p (racc)
3574 /* When an access represents an unscalarizable region, it usually
3575 represents accesses with variable offset and thus must not be used
3576 to generate new memory accesses. */
3577 && !lacc->grp_unscalarizable_region
3578 && !racc->grp_unscalarizable_region)
3579 {
3580 struct subreplacement_assignment_data sad;
3581
3582 sad.left_offset = lacc->offset;
3583 sad.assignment_lhs = lhs;
3584 sad.assignment_rhs = rhs;
3585 sad.top_racc = racc;
3586 sad.old_gsi = *gsi;
3587 sad.new_gsi = gsi;
3588 sad.loc = gimple_location (stmt);
3589 sad.refreshed = SRA_UDH_NONE;
3590
3591 if (lacc->grp_read && !lacc->grp_covered)
3592 handle_unscalarized_data_in_subtree (&sad);
3593
3594 load_assign_lhs_subreplacements (lacc, &sad);
3595 if (sad.refreshed != SRA_UDH_RIGHT)
3596 {
3597 gsi_next (gsi);
3598 unlink_stmt_vdef (stmt);
3599 gsi_remove (&sad.old_gsi, true);
3600 release_defs (stmt);
3601 sra_stats.deleted++;
3602 return SRA_AM_REMOVED;
3603 }
3604 }
3605 else
3606 {
3607 if (access_has_children_p (racc)
3608 && !racc->grp_unscalarized_data
3609 && TREE_CODE (lhs) != SSA_NAME)
3610 {
3611 if (dump_file)
3612 {
3613 fprintf (dump_file, "Removing load: ");
3614 print_gimple_stmt (dump_file, stmt, 0);
3615 }
3616 generate_subtree_copies (racc->first_child, lhs,
3617 racc->offset, 0, 0, gsi,
3618 false, false, loc);
3619 gcc_assert (stmt == gsi_stmt (*gsi));
3620 unlink_stmt_vdef (stmt);
3621 gsi_remove (gsi, true);
3622 release_defs (stmt);
3623 sra_stats.deleted++;
3624 return SRA_AM_REMOVED;
3625 }
3626 /* Restore the aggregate RHS from its components so the
3627 prevailing aggregate copy does the right thing. */
3628 if (access_has_children_p (racc))
3629 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3630 gsi, false, false, loc);
3631 /* Re-load the components of the aggregate copy destination.
3632 But use the RHS aggregate to load from to expose more
3633 optimization opportunities. */
3634 if (access_has_children_p (lacc))
3635 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3636 0, 0, gsi, true, true, loc);
3637 }
3638
3639 return SRA_AM_NONE;
3640 }
3641 }
3642
3643 /* Set any scalar replacements of values in the constant pool to the initial
3644 value of the constant. (Constant-pool decls like *.LC0 have effectively
3645 been initialized before the program starts, we must do the same for their
3646 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3647 the function's entry block. */
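/* For illustration only; the function and decl names below are hypothetical.
   A body such as

     extern void use (int);

     void
     foo (void)
     {
       int v[4] = { 1, 2, 3, 4 };
       use (v[1]);
     }

   may be implemented by copying the initializer from a constant-pool decl
   like *.LC0.  When SRA creates scalar replacements for pieces of that
   constant, the statements emitted here (e.g. SR.1 = *.LC0[1];) give them
   the value the constant-pool entry already holds.  */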
3648
3649 static void
3650 initialize_constant_pool_replacements (void)
3651 {
3652 gimple_seq seq = NULL;
3653 gimple_stmt_iterator gsi = gsi_start (seq);
3654 bitmap_iterator bi;
3655 unsigned i;
3656
3657 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3658 {
3659 tree var = candidate (i);
3660 if (!constant_decl_p (var))
3661 continue;
3662 vec<access_p> *access_vec = get_base_access_vector (var);
3663 if (!access_vec)
3664 continue;
3665 for (unsigned i = 0; i < access_vec->length (); i++)
3666 {
3667 struct access *access = (*access_vec)[i];
3668 if (!access->replacement_decl)
3669 continue;
3670 gassign *stmt
3671 = gimple_build_assign (get_access_replacement (access),
3672 unshare_expr (access->expr));
3673 if (dump_file && (dump_flags & TDF_DETAILS))
3674 {
3675 fprintf (dump_file, "Generating constant initializer: ");
3676 print_gimple_stmt (dump_file, stmt, 0);
3677 fprintf (dump_file, "\n");
3678 }
3679 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3680 update_stmt (stmt);
3681 }
3682 }
3683
3684 seq = gsi_seq (gsi);
3685 if (seq)
3686 gsi_insert_seq_on_edge_immediate (
3687 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3688 }
3689
3690 /* Traverse the function body and perform all modifications as decided in
3691 analyze_all_variable_accesses. Return true iff the CFG has been
3692 changed. */
3693
3694 static bool
3695 sra_modify_function_body (void)
3696 {
3697 bool cfg_changed = false;
3698 basic_block bb;
3699
3700 initialize_constant_pool_replacements ();
3701
3702 FOR_EACH_BB_FN (bb, cfun)
3703 {
3704 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3705 while (!gsi_end_p (gsi))
3706 {
3707 gimple *stmt = gsi_stmt (gsi);
3708 enum assignment_mod_result assign_result;
3709 bool modified = false, deleted = false;
3710 tree *t;
3711 unsigned i;
3712
3713 switch (gimple_code (stmt))
3714 {
3715 case GIMPLE_RETURN:
3716 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3717 if (*t != NULL_TREE)
3718 modified |= sra_modify_expr (t, &gsi, false);
3719 break;
3720
3721 case GIMPLE_ASSIGN:
3722 assign_result = sra_modify_assign (stmt, &gsi);
3723 modified |= assign_result == SRA_AM_MODIFIED;
3724 deleted = assign_result == SRA_AM_REMOVED;
3725 break;
3726
3727 case GIMPLE_CALL:
3728 /* Operands must be processed before the lhs. */
3729 for (i = 0; i < gimple_call_num_args (stmt); i++)
3730 {
3731 t = gimple_call_arg_ptr (stmt, i);
3732 modified |= sra_modify_expr (t, &gsi, false);
3733 }
3734
3735 if (gimple_call_lhs (stmt))
3736 {
3737 t = gimple_call_lhs_ptr (stmt);
3738 modified |= sra_modify_expr (t, &gsi, true);
3739 }
3740 break;
3741
3742 case GIMPLE_ASM:
3743 {
3744 gasm *asm_stmt = as_a <gasm *> (stmt);
3745 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3746 {
3747 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3748 modified |= sra_modify_expr (t, &gsi, false);
3749 }
3750 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3751 {
3752 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3753 modified |= sra_modify_expr (t, &gsi, true);
3754 }
3755 }
3756 break;
3757
3758 default:
3759 break;
3760 }
3761
3762 if (modified)
3763 {
3764 update_stmt (stmt);
3765 if (maybe_clean_eh_stmt (stmt)
3766 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3767 cfg_changed = true;
3768 }
3769 if (!deleted)
3770 gsi_next (&gsi);
3771 }
3772 }
3773
3774 gsi_commit_edge_inserts ();
3775 return cfg_changed;
3776 }
3777
3778 /* Generate statements initializing scalar replacements of parts of function
3779 parameters. */
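/* A minimal illustration with hypothetical names: for a parameter like

     struct pair { int a; int b; };
     int sum (struct pair p) { return p.a + p.b; }

   whose fields received scalar replacements, this emits something along the
   lines of

     p$a_1 = p.a;
     p$b_2 = p.b;

   on the single edge leaving the entry block, so the replacements start out
   holding the values the caller passed in.  */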
3780
3781 static void
3782 initialize_parameter_reductions (void)
3783 {
3784 gimple_stmt_iterator gsi;
3785 gimple_seq seq = NULL;
3786 tree parm;
3787
3788 gsi = gsi_start (seq);
3789 for (parm = DECL_ARGUMENTS (current_function_decl);
3790 parm;
3791 parm = DECL_CHAIN (parm))
3792 {
3793 vec<access_p> *access_vec;
3794 struct access *access;
3795
3796 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3797 continue;
3798 access_vec = get_base_access_vector (parm);
3799 if (!access_vec)
3800 continue;
3801
3802 for (access = (*access_vec)[0];
3803 access;
3804 access = access->next_grp)
3805 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3806 EXPR_LOCATION (parm));
3807 }
3808
3809 seq = gsi_seq (gsi);
3810 if (seq)
3811 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3812 }
3813
3814 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3815 it reveals there are components of some aggregates to be scalarized, it runs
3816 the required transformations. */
3817 static unsigned int
3818 perform_intra_sra (void)
3819 {
3820 int ret = 0;
3821 sra_initialize ();
3822
3823 if (!find_var_candidates ())
3824 goto out;
3825
3826 if (!scan_function ())
3827 goto out;
3828
3829 if (!analyze_all_variable_accesses ())
3830 goto out;
3831
3832 if (sra_modify_function_body ())
3833 ret = TODO_update_ssa | TODO_cleanup_cfg;
3834 else
3835 ret = TODO_update_ssa;
3836 initialize_parameter_reductions ();
3837
3838 statistics_counter_event (cfun, "Scalar replacements created",
3839 sra_stats.replacements);
3840 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3841 statistics_counter_event (cfun, "Subtree copy stmts",
3842 sra_stats.subtree_copies);
3843 statistics_counter_event (cfun, "Subreplacement stmts",
3844 sra_stats.subreplacements);
3845 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3846 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3847 sra_stats.separate_lhs_rhs_handling);
3848
3849 out:
3850 sra_deinitialize ();
3851 return ret;
3852 }
3853
3854 /* Perform early intraprocedural SRA. */
3855 static unsigned int
3856 early_intra_sra (void)
3857 {
3858 sra_mode = SRA_MODE_EARLY_INTRA;
3859 return perform_intra_sra ();
3860 }
3861
3862 /* Perform "late" intraprocedural SRA. */
3863 static unsigned int
3864 late_intra_sra (void)
3865 {
3866 sra_mode = SRA_MODE_INTRA;
3867 return perform_intra_sra ();
3868 }
3869
3870
3871 static bool
3872 gate_intra_sra (void)
3873 {
3874 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3875 }
3876
3877
3878 namespace {
3879
3880 const pass_data pass_data_sra_early =
3881 {
3882 GIMPLE_PASS, /* type */
3883 "esra", /* name */
3884 OPTGROUP_NONE, /* optinfo_flags */
3885 TV_TREE_SRA, /* tv_id */
3886 ( PROP_cfg | PROP_ssa ), /* properties_required */
3887 0, /* properties_provided */
3888 0, /* properties_destroyed */
3889 0, /* todo_flags_start */
3890 TODO_update_ssa, /* todo_flags_finish */
3891 };
3892
3893 class pass_sra_early : public gimple_opt_pass
3894 {
3895 public:
3896 pass_sra_early (gcc::context *ctxt)
3897 : gimple_opt_pass (pass_data_sra_early, ctxt)
3898 {}
3899
3900 /* opt_pass methods: */
3901 virtual bool gate (function *) { return gate_intra_sra (); }
3902 virtual unsigned int execute (function *) { return early_intra_sra (); }
3903
3904 }; // class pass_sra_early
3905
3906 } // anon namespace
3907
3908 gimple_opt_pass *
3909 make_pass_sra_early (gcc::context *ctxt)
3910 {
3911 return new pass_sra_early (ctxt);
3912 }
3913
3914 namespace {
3915
3916 const pass_data pass_data_sra =
3917 {
3918 GIMPLE_PASS, /* type */
3919 "sra", /* name */
3920 OPTGROUP_NONE, /* optinfo_flags */
3921 TV_TREE_SRA, /* tv_id */
3922 ( PROP_cfg | PROP_ssa ), /* properties_required */
3923 0, /* properties_provided */
3924 0, /* properties_destroyed */
3925 TODO_update_address_taken, /* todo_flags_start */
3926 TODO_update_ssa, /* todo_flags_finish */
3927 };
3928
3929 class pass_sra : public gimple_opt_pass
3930 {
3931 public:
3932 pass_sra (gcc::context *ctxt)
3933 : gimple_opt_pass (pass_data_sra, ctxt)
3934 {}
3935
3936 /* opt_pass methods: */
3937 virtual bool gate (function *) { return gate_intra_sra (); }
3938 virtual unsigned int execute (function *) { return late_intra_sra (); }
3939
3940 }; // class pass_sra
3941
3942 } // anon namespace
3943
3944 gimple_opt_pass *
3945 make_pass_sra (gcc::context *ctxt)
3946 {
3947 return new pass_sra (ctxt);
3948 }
3949
3950
3951 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3952 parameter. */
3953
3954 static bool
3955 is_unused_scalar_param (tree parm)
3956 {
3957 tree name;
3958 return (is_gimple_reg (parm)
3959 && (!(name = ssa_default_def (cfun, parm))
3960 || has_zero_uses (name)));
3961 }
3962
3963 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3964 examine whether there are any direct or otherwise infeasible ones. If so,
3965 return true, otherwise return false. PARM must be a gimple register with a
3966 non-NULL default definition. */
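/* A rough illustration with hypothetical functions: only plain,
   type-compatible, non-volatile dereferences at offset zero count as handled
   uses here, e.g.

     extern void consume (int *);

     int ok (int *p)   { return *p; }                only dereferenced
     int bad1 (int *p) { consume (p); return 0; }    the pointer escapes
     int bad2 (int *p) { return p[2]; }              offset is not zero

   ok has no direct uses; bad1 and bad2 do, which disqualifies their
   parameter from IPA-SRA.  */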
3967
3968 static bool
3969 ptr_parm_has_direct_uses (tree parm)
3970 {
3971 imm_use_iterator ui;
3972 gimple *stmt;
3973 tree name = ssa_default_def (cfun, parm);
3974 bool ret = false;
3975
3976 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3977 {
3978 int uses_ok = 0;
3979 use_operand_p use_p;
3980
3981 if (is_gimple_debug (stmt))
3982 continue;
3983
3984 /* Valid uses include dereferences on the lhs and the rhs. */
3985 if (gimple_has_lhs (stmt))
3986 {
3987 tree lhs = gimple_get_lhs (stmt);
3988 while (handled_component_p (lhs))
3989 lhs = TREE_OPERAND (lhs, 0);
3990 if (TREE_CODE (lhs) == MEM_REF
3991 && TREE_OPERAND (lhs, 0) == name
3992 && integer_zerop (TREE_OPERAND (lhs, 1))
3993 && types_compatible_p (TREE_TYPE (lhs),
3994 TREE_TYPE (TREE_TYPE (name)))
3995 && !TREE_THIS_VOLATILE (lhs))
3996 uses_ok++;
3997 }
3998 if (gimple_assign_single_p (stmt))
3999 {
4000 tree rhs = gimple_assign_rhs1 (stmt);
4001 while (handled_component_p (rhs))
4002 rhs = TREE_OPERAND (rhs, 0);
4003 if (TREE_CODE (rhs) == MEM_REF
4004 && TREE_OPERAND (rhs, 0) == name
4005 && integer_zerop (TREE_OPERAND (rhs, 1))
4006 && types_compatible_p (TREE_TYPE (rhs),
4007 TREE_TYPE (TREE_TYPE (name)))
4008 && !TREE_THIS_VOLATILE (rhs))
4009 uses_ok++;
4010 }
4011 else if (is_gimple_call (stmt))
4012 {
4013 unsigned i;
4014 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4015 {
4016 tree arg = gimple_call_arg (stmt, i);
4017 while (handled_component_p (arg))
4018 arg = TREE_OPERAND (arg, 0);
4019 if (TREE_CODE (arg) == MEM_REF
4020 && TREE_OPERAND (arg, 0) == name
4021 && integer_zerop (TREE_OPERAND (arg, 1))
4022 && types_compatible_p (TREE_TYPE (arg),
4023 TREE_TYPE (TREE_TYPE (name)))
4024 && !TREE_THIS_VOLATILE (arg))
4025 uses_ok++;
4026 }
4027 }
4028
4029 /* If the number of valid uses does not match the number of
4030 uses in this stmt there is an unhandled use. */
4031 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4032 --uses_ok;
4033
4034 if (uses_ok != 0)
4035 ret = true;
4036
4037 if (ret)
4038 BREAK_FROM_IMM_USE_STMT (ui);
4039 }
4040
4041 return ret;
4042 }
4043
4044 /* Identify candidates for reduction for IPA-SRA based on their type and mark
4045 them in candidate_bitmap. Note that these do not necessarily include
4046 parameters which are unused and thus can be removed. Return true iff any
4047 such candidate has been found. */
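/* A rough illustration of which parameters can become candidates; the
   signature below is hypothetical:

     struct big { int x[8]; };
     void f (struct big s,          by-value aggregate: candidate
             struct big *p,         pointer: candidate if only dereferenced
             volatile int v,        volatile: never a candidate
             __builtin_va_list ap); va_list: typically never a candidate

   Unused scalar parameters also make this function return true, even though
   they are not entered into candidate_bitmap themselves.  */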
4048
4049 static bool
4050 find_param_candidates (void)
4051 {
4052 tree parm;
4053 int count = 0;
4054 bool ret = false;
4055 const char *msg;
4056
4057 for (parm = DECL_ARGUMENTS (current_function_decl);
4058 parm;
4059 parm = DECL_CHAIN (parm))
4060 {
4061 tree type = TREE_TYPE (parm);
4062 tree_node **slot;
4063
4064 count++;
4065
4066 if (TREE_THIS_VOLATILE (parm)
4067 || TREE_ADDRESSABLE (parm)
4068 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4069 continue;
4070
4071 if (is_unused_scalar_param (parm))
4072 {
4073 ret = true;
4074 continue;
4075 }
4076
4077 if (POINTER_TYPE_P (type))
4078 {
4079 type = TREE_TYPE (type);
4080
4081 if (TREE_CODE (type) == FUNCTION_TYPE
4082 || TYPE_VOLATILE (type)
4083 || (TREE_CODE (type) == ARRAY_TYPE
4084 && TYPE_NONALIASED_COMPONENT (type))
4085 || !is_gimple_reg (parm)
4086 || is_va_list_type (type)
4087 || ptr_parm_has_direct_uses (parm))
4088 continue;
4089 }
4090 else if (!AGGREGATE_TYPE_P (type))
4091 continue;
4092
4093 if (!COMPLETE_TYPE_P (type)
4094 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4095 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4096 || (AGGREGATE_TYPE_P (type)
4097 && type_internals_preclude_sra_p (type, &msg)))
4098 continue;
4099
4100 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4101 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4102 *slot = parm;
4103
4104 ret = true;
4105 if (dump_file && (dump_flags & TDF_DETAILS))
4106 {
4107 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4108 print_generic_expr (dump_file, parm);
4109 fprintf (dump_file, "\n");
4110 }
4111 }
4112
4113 func_param_count = count;
4114 return ret;
4115 }
4116
4117 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4118 maybe_modified. */
4119
4120 static bool
4121 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4122 void *data)
4123 {
4124 struct access *repr = (struct access *) data;
4125
4126 repr->grp_maybe_modified = 1;
4127 return true;
4128 }
4129
4130 /* Analyze what representatives (in linked lists accessible from
4131 REPRESENTATIVES) can be modified by side effects of statements in the
4132 current function. */
4133
4134 static void
4135 analyze_modified_params (vec<access_p> representatives)
4136 {
4137 int i;
4138
4139 for (i = 0; i < func_param_count; i++)
4140 {
4141 struct access *repr;
4142
4143 for (repr = representatives[i];
4144 repr;
4145 repr = repr->next_grp)
4146 {
4147 struct access *access;
4148 bitmap visited;
4149 ao_ref ar;
4150
4151 if (no_accesses_p (repr))
4152 continue;
4153 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4154 || repr->grp_maybe_modified)
4155 continue;
4156
4157 ao_ref_init (&ar, repr->expr);
4158 visited = BITMAP_ALLOC (NULL);
4159 for (access = repr; access; access = access->next_sibling)
4160 {
4161 /* All accesses are read ones, otherwise grp_maybe_modified would
4162 be trivially set. */
4163 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4164 mark_maybe_modified, repr, &visited);
4165 if (repr->grp_maybe_modified)
4166 break;
4167 }
4168 BITMAP_FREE (visited);
4169 }
4170 }
4171 }
4172
4173 /* Propagate distances in bb_dereferences in the direction opposite to the
4174 control flow edges, in each step storing the maximum of the current value
4175 and the minimum of all successors. These steps are repeated until the table
4176 stabilizes. Note that BBs which might terminate the function (according to
4177 the final_bbs bitmap) are never updated in this way. */
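/* A small worked example on a hypothetical CFG, assuming parameter 0 is
   dereferenced up to distance 32 only in one arm of a conditional:

            bb2  (distance 0)
           /               \
     bb3 (32)            bb4 (0)
           \               /
            bb5  (distance 0)

   Processing bb2 takes the minimum over its successors, min (32, 0) = 0, so
   nothing propagates to it or to the entry block, and the representative of
   that parameter will later be marked grp_not_necessarilly_dereferenced by
   analyze_caller_dereference_legality.  */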
4178
4179 static void
4180 propagate_dereference_distances (void)
4181 {
4182 basic_block bb;
4183
4184 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4185 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4186 FOR_EACH_BB_FN (bb, cfun)
4187 {
4188 queue.quick_push (bb);
4189 bb->aux = bb;
4190 }
4191
4192 while (!queue.is_empty ())
4193 {
4194 edge_iterator ei;
4195 edge e;
4196 bool change = false;
4197 int i;
4198
4199 bb = queue.pop ();
4200 bb->aux = NULL;
4201
4202 if (bitmap_bit_p (final_bbs, bb->index))
4203 continue;
4204
4205 for (i = 0; i < func_param_count; i++)
4206 {
4207 int idx = bb->index * func_param_count + i;
4208 bool first = true;
4209 HOST_WIDE_INT inh = 0;
4210
4211 FOR_EACH_EDGE (e, ei, bb->succs)
4212 {
4213 int succ_idx = e->dest->index * func_param_count + i;
4214
4215 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4216 continue;
4217
4218 if (first)
4219 {
4220 first = false;
4221 inh = bb_dereferences [succ_idx];
4222 }
4223 else if (bb_dereferences [succ_idx] < inh)
4224 inh = bb_dereferences [succ_idx];
4225 }
4226
4227 if (!first && bb_dereferences[idx] < inh)
4228 {
4229 bb_dereferences[idx] = inh;
4230 change = true;
4231 }
4232 }
4233
4234 if (change && !bitmap_bit_p (final_bbs, bb->index))
4235 FOR_EACH_EDGE (e, ei, bb->preds)
4236 {
4237 if (e->src->aux)
4238 continue;
4239
4240 e->src->aux = e->src;
4241 queue.quick_push (e->src);
4242 }
4243 }
4244 }
4245
4246 /* Dump a dereferences TABLE with heading STR to file F. */
4247
4248 static void
4249 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4250 {
4251 basic_block bb;
4252
4253 fprintf (dump_file, "%s", str);
4254 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4255 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4256 {
4257 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4258 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4259 {
4260 int i;
4261 for (i = 0; i < func_param_count; i++)
4262 {
4263 int idx = bb->index * func_param_count + i;
4264 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4265 }
4266 }
4267 fprintf (f, "\n");
4268 }
4269 fprintf (dump_file, "\n");
4270 }
4271
4272 /* Determine which (parts of) parameters passed by reference and not assigned
4273 to are not certainly dereferenced in this function, so that the
4274 dereferencing cannot be safely moved to the caller without potentially
4275 introducing a segfault. Mark such REPRESENTATIVES as
4276 grp_not_necessarilly_dereferenced.
4277
4278 The maximum dereferenced "distance," i.e. the offset + size of the accessed
4279 part, is calculated for each pointer parameter rather than a simple boolean,
4280 in order to handle cases when only a fraction of the whole
4281 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4282 an example).
4283
4284 The maximum dereference distances for each pointer parameter and BB are
4285 already stored in bb_dereferences. This routine simply propagates these
4286 values upwards by propagate_dereference_distances and then compares the
4287 distances of individual parameters in the ENTRY BB to the equivalent
4288 distances of each representative of a (fraction of a) parameter. */
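/* For illustration only, with a hypothetical function:

     extern void g (int);

     void
     f (int *p, int flag)
     {
       if (flag)
         g (*p);
     }

   Here p is dereferenced on one path only, so a caller may legitimately pass
   an invalid pointer together with flag == 0.  Moving the dereference into
   every caller would not be safe, so such a parameter is not converted to
   being passed by value.  */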
4289
4290 static void
4291 analyze_caller_dereference_legality (vec<access_p> representatives)
4292 {
4293 int i;
4294
4295 if (dump_file && (dump_flags & TDF_DETAILS))
4296 dump_dereferences_table (dump_file,
4297 "Dereference table before propagation:\n",
4298 bb_dereferences);
4299
4300 propagate_dereference_distances ();
4301
4302 if (dump_file && (dump_flags & TDF_DETAILS))
4303 dump_dereferences_table (dump_file,
4304 "Dereference table after propagation:\n",
4305 bb_dereferences);
4306
4307 for (i = 0; i < func_param_count; i++)
4308 {
4309 struct access *repr = representatives[i];
4310 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4311
4312 if (!repr || no_accesses_p (repr))
4313 continue;
4314
4315 do
4316 {
4317 if ((repr->offset + repr->size) > bb_dereferences[idx])
4318 repr->grp_not_necessarilly_dereferenced = 1;
4319 repr = repr->next_grp;
4320 }
4321 while (repr);
4322 }
4323 }
4324
4325 /* Return the representative access for the parameter declaration PARM if it is
4326 a scalar passed by reference which is not written to and the pointer value
4327 is not used directly. Thus, if it is legal to dereference it in the caller
4328 and we can rule out modifications through aliases, such a parameter should be
4329 turned into one passed by value. Return NULL otherwise. */
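/* A hypothetical example of the transformation this enables:

     int f (const int *p) { return *p + *p; }

   may eventually be rewritten by IPA-SRA as

     int f (int p_val) { return p_val + p_val; }

   with all callers adjusted to pass the pointed-to value, provided the later
   analyses can rule out modification through aliases and prove the pointer
   is certainly dereferenced.  p_val is a made-up name.  */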
4330
4331 static struct access *
4332 unmodified_by_ref_scalar_representative (tree parm)
4333 {
4334 int i, access_count;
4335 struct access *repr;
4336 vec<access_p> *access_vec;
4337
4338 access_vec = get_base_access_vector (parm);
4339 gcc_assert (access_vec);
4340 repr = (*access_vec)[0];
4341 if (repr->write)
4342 return NULL;
4343 repr->group_representative = repr;
4344
4345 access_count = access_vec->length ();
4346 for (i = 1; i < access_count; i++)
4347 {
4348 struct access *access = (*access_vec)[i];
4349 if (access->write)
4350 return NULL;
4351 access->group_representative = repr;
4352 access->next_sibling = repr->next_sibling;
4353 repr->next_sibling = access;
4354 }
4355
4356 repr->grp_read = 1;
4357 repr->grp_scalar_ptr = 1;
4358 return repr;
4359 }
4360
4361 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4362 associated with. REQ_ALIGN is the minimum required alignment. */
4363
4364 static bool
4365 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4366 {
4367 unsigned int exp_align;
4368 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4369 is incompatible assign in a call statement (and possibly even in asm
4370 statements). This can be relaxed by using a new temporary but only for
4371 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4372 intraprocedural SRA we deal with this by keeping the old aggregate around,
4373 something we cannot do in IPA-SRA.) */
4374 if (access->write
4375 && (is_gimple_call (access->stmt)
4376 || gimple_code (access->stmt) == GIMPLE_ASM))
4377 return true;
4378
4379 exp_align = get_object_alignment (access->expr);
4380 if (exp_align < req_align)
4381 return true;
4382
4383 return false;
4384 }
4385
4386
4387 /* Sort collected accesses for parameter PARM, identify representatives for
4388 each accessed region and link them together. Return NULL if there are
4389 different but overlapping accesses, return the special pointer value if
4390 there are no accesses for this parameter at all, and return the
4391 first representative otherwise. Set *RO_GRP if there is a group of accesses
4392 with only read (i.e. no write) accesses. */
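/* A sketch of the "all or nothing" rule, using hypothetical code:

     struct pair { int a; int b; };
     extern void g (struct pair);

     int f (struct pair p) { g (p); return p.a; }

   Passing p to g accesses the whole parameter (offset 0, full size) while
   the return accesses p.a (offset 0, smaller size); two accesses share an
   offset but differ in size, so this function returns NULL and the
   parameter is left untouched.  */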
4393
4394 static struct access *
4395 splice_param_accesses (tree parm, bool *ro_grp)
4396 {
4397 int i, j, access_count, group_count;
4398 int agg_size, total_size = 0;
4399 struct access *access, *res, **prev_acc_ptr = &res;
4400 vec<access_p> *access_vec;
4401
4402 access_vec = get_base_access_vector (parm);
4403 if (!access_vec)
4404 return &no_accesses_representant;
4405 access_count = access_vec->length ();
4406
4407 access_vec->qsort (compare_access_positions);
4408
4409 i = 0;
4410 total_size = 0;
4411 group_count = 0;
4412 while (i < access_count)
4413 {
4414 bool modification;
4415 tree a1_alias_type;
4416 access = (*access_vec)[i];
4417 modification = access->write;
4418 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4419 return NULL;
4420 a1_alias_type = reference_alias_ptr_type (access->expr);
4421
4422 /* Access is about to become group representative unless we find some
4423 nasty overlap which would preclude us from breaking this parameter
4424 apart. */
4425
4426 j = i + 1;
4427 while (j < access_count)
4428 {
4429 struct access *ac2 = (*access_vec)[j];
4430 if (ac2->offset != access->offset)
4431 {
4432 /* All or nothing law for parameters. */
4433 if (access->offset + access->size > ac2->offset)
4434 return NULL;
4435 else
4436 break;
4437 }
4438 else if (ac2->size != access->size)
4439 return NULL;
4440
4441 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4442 || (ac2->type != access->type
4443 && (TREE_ADDRESSABLE (ac2->type)
4444 || TREE_ADDRESSABLE (access->type)))
4445 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4446 return NULL;
4447
4448 modification |= ac2->write;
4449 ac2->group_representative = access;
4450 ac2->next_sibling = access->next_sibling;
4451 access->next_sibling = ac2;
4452 j++;
4453 }
4454
4455 group_count++;
4456 access->grp_maybe_modified = modification;
4457 if (!modification)
4458 *ro_grp = true;
4459 *prev_acc_ptr = access;
4460 prev_acc_ptr = &access->next_grp;
4461 total_size += access->size;
4462 i = j;
4463 }
4464
4465 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4466 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4467 else
4468 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4469 if (total_size >= agg_size)
4470 return NULL;
4471
4472 gcc_assert (group_count > 0);
4473 return res;
4474 }
4475
4476 /* Decide whether parameters with representative accesses given by REPR should
4477 be reduced into components. */
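/* A worked example with made-up, target-dependent numbers, assuming 32-bit
   int, a 64-bit pointer and the default value 2 of
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR:

     struct quad { int a, b, c, d; };
     int f (struct quad *p) { return p->a + p->d; }

   Only two 32-bit pieces are read, so total_size = 64 while agg_size = 128
   and parm_size_limit = 2 * 64 = 128 (cur_parm_size being the pointer).
   Since 64 < 128 and 64 <= 128, the parameter would be split into two
   components, one for each accessed field.  */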
4478
4479 static int
4480 decide_one_param_reduction (struct access *repr)
4481 {
4482 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4483 bool by_ref;
4484 tree parm;
4485
4486 parm = repr->base;
4487 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4488 gcc_assert (cur_parm_size > 0);
4489
4490 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4491 {
4492 by_ref = true;
4493 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4494 }
4495 else
4496 {
4497 by_ref = false;
4498 agg_size = cur_parm_size;
4499 }
4500
4501 if (dump_file)
4502 {
4503 struct access *acc;
4504 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4505 print_generic_expr (dump_file, parm);
4506 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4507 for (acc = repr; acc; acc = acc->next_grp)
4508 dump_access (dump_file, acc, true);
4509 }
4510
4511 total_size = 0;
4512 new_param_count = 0;
4513
4514 for (; repr; repr = repr->next_grp)
4515 {
4516 gcc_assert (parm == repr->base);
4517
4518 /* Taking the address of a non-addressable field is verboten. */
4519 if (by_ref && repr->non_addressable)
4520 return 0;
4521
4522 /* Do not decompose a non-BLKmode param in a way that would
4523 create BLKmode params. Especially for by-reference passing
4524 (thus, pointer-type param) this is hardly worthwhile. */
4525 if (DECL_MODE (parm) != BLKmode
4526 && TYPE_MODE (repr->type) == BLKmode)
4527 return 0;
4528
4529 if (!by_ref || (!repr->grp_maybe_modified
4530 && !repr->grp_not_necessarilly_dereferenced))
4531 total_size += repr->size;
4532 else
4533 total_size += cur_parm_size;
4534
4535 new_param_count++;
4536 }
4537
4538 gcc_assert (new_param_count > 0);
4539
4540 if (optimize_function_for_size_p (cfun))
4541 parm_size_limit = cur_parm_size;
4542 else
4543 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4544 * cur_parm_size);
4545
4546 if (total_size < agg_size
4547 && total_size <= parm_size_limit)
4548 {
4549 if (dump_file)
4550 fprintf (dump_file, " ....will be split into %i components\n",
4551 new_param_count);
4552 return new_param_count;
4553 }
4554 else
4555 return 0;
4556 }
4557
4558 /* The order of the following enums is important; we need to do extra work for
4559 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4560 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4561 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4562
4563 /* Identify representatives of all accesses to all candidate parameters for
4564 IPA-SRA. Return result based on what representatives have been found. */
4565
4566 static enum ipa_splicing_result
4567 splice_all_param_accesses (vec<access_p> &representatives)
4568 {
4569 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4570 tree parm;
4571 struct access *repr;
4572
4573 representatives.create (func_param_count);
4574
4575 for (parm = DECL_ARGUMENTS (current_function_decl);
4576 parm;
4577 parm = DECL_CHAIN (parm))
4578 {
4579 if (is_unused_scalar_param (parm))
4580 {
4581 representatives.quick_push (&no_accesses_representant);
4582 if (result == NO_GOOD_ACCESS)
4583 result = UNUSED_PARAMS;
4584 }
4585 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4586 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4587 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4588 {
4589 repr = unmodified_by_ref_scalar_representative (parm);
4590 representatives.quick_push (repr);
4591 if (repr)
4592 result = UNMODIF_BY_REF_ACCESSES;
4593 }
4594 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4595 {
4596 bool ro_grp = false;
4597 repr = splice_param_accesses (parm, &ro_grp);
4598 representatives.quick_push (repr);
4599
4600 if (repr && !no_accesses_p (repr))
4601 {
4602 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4603 {
4604 if (ro_grp)
4605 result = UNMODIF_BY_REF_ACCESSES;
4606 else if (result < MODIF_BY_REF_ACCESSES)
4607 result = MODIF_BY_REF_ACCESSES;
4608 }
4609 else if (result < BY_VAL_ACCESSES)
4610 result = BY_VAL_ACCESSES;
4611 }
4612 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4613 result = UNUSED_PARAMS;
4614 }
4615 else
4616 representatives.quick_push (NULL);
4617 }
4618
4619 if (result == NO_GOOD_ACCESS)
4620 {
4621 representatives.release ();
4622 return NO_GOOD_ACCESS;
4623 }
4624
4625 return result;
4626 }
4627
4628 /* Return the index of BASE in PARMS. Abort if it is not found. */
4629
4630 static inline int
4631 get_param_index (tree base, vec<tree> parms)
4632 {
4633 int i, len;
4634
4635 len = parms.length ();
4636 for (i = 0; i < len; i++)
4637 if (parms[i] == base)
4638 return i;
4639 gcc_unreachable ();
4640 }
4641
4642 /* Convert the decisions made at the representative level into compact
4643 parameter adjustments. REPRESENTATIVES are pointers to the first
4644 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4645 final number of adjustments. */
4646
4647 static ipa_parm_adjustment_vec
4648 turn_representatives_into_adjustments (vec<access_p> representatives,
4649 int adjustments_count)
4650 {
4651 vec<tree> parms;
4652 ipa_parm_adjustment_vec adjustments;
4653 tree parm;
4654 int i;
4655
4656 gcc_assert (adjustments_count > 0);
4657 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4658 adjustments.create (adjustments_count);
4659 parm = DECL_ARGUMENTS (current_function_decl);
4660 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4661 {
4662 struct access *repr = representatives[i];
4663
4664 if (!repr || no_accesses_p (repr))
4665 {
4666 struct ipa_parm_adjustment adj;
4667
4668 memset (&adj, 0, sizeof (adj));
4669 adj.base_index = get_param_index (parm, parms);
4670 adj.base = parm;
4671 if (!repr)
4672 adj.op = IPA_PARM_OP_COPY;
4673 else
4674 adj.op = IPA_PARM_OP_REMOVE;
4675 adj.arg_prefix = "ISRA";
4676 adjustments.quick_push (adj);
4677 }
4678 else
4679 {
4680 struct ipa_parm_adjustment adj;
4681 int index = get_param_index (parm, parms);
4682
4683 for (; repr; repr = repr->next_grp)
4684 {
4685 memset (&adj, 0, sizeof (adj));
4686 gcc_assert (repr->base == parm);
4687 adj.base_index = index;
4688 adj.base = repr->base;
4689 adj.type = repr->type;
4690 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4691 adj.offset = repr->offset;
4692 adj.reverse = repr->reverse;
4693 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4694 && (repr->grp_maybe_modified
4695 || repr->grp_not_necessarilly_dereferenced));
4696 adj.arg_prefix = "ISRA";
4697 adjustments.quick_push (adj);
4698 }
4699 }
4700 }
4701 parms.release ();
4702 return adjustments;
4703 }
4704
4705 /* Analyze the collected accesses and produce a plan of what to do with the
4706 parameters in the form of adjustments, an empty vector meaning nothing. */
4707
4708 static ipa_parm_adjustment_vec
4709 analyze_all_param_acesses (void)
4710 {
4711 enum ipa_splicing_result repr_state;
4712 bool proceed = false;
4713 int i, adjustments_count = 0;
4714 vec<access_p> representatives;
4715 ipa_parm_adjustment_vec adjustments;
4716
4717 repr_state = splice_all_param_accesses (representatives);
4718 if (repr_state == NO_GOOD_ACCESS)
4719 return ipa_parm_adjustment_vec ();
4720
4721 /* If there are any parameters passed by reference which are not modified
4722 directly, we need to check whether they can be modified indirectly. */
4723 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4724 {
4725 analyze_caller_dereference_legality (representatives);
4726 analyze_modified_params (representatives);
4727 }
4728
4729 for (i = 0; i < func_param_count; i++)
4730 {
4731 struct access *repr = representatives[i];
4732
4733 if (repr && !no_accesses_p (repr))
4734 {
4735 if (repr->grp_scalar_ptr)
4736 {
4737 adjustments_count++;
4738 if (repr->grp_not_necessarilly_dereferenced
4739 || repr->grp_maybe_modified)
4740 representatives[i] = NULL;
4741 else
4742 {
4743 proceed = true;
4744 sra_stats.scalar_by_ref_to_by_val++;
4745 }
4746 }
4747 else
4748 {
4749 int new_components = decide_one_param_reduction (repr);
4750
4751 if (new_components == 0)
4752 {
4753 representatives[i] = NULL;
4754 adjustments_count++;
4755 }
4756 else
4757 {
4758 adjustments_count += new_components;
4759 sra_stats.aggregate_params_reduced++;
4760 sra_stats.param_reductions_created += new_components;
4761 proceed = true;
4762 }
4763 }
4764 }
4765 else
4766 {
4767 if (no_accesses_p (repr))
4768 {
4769 proceed = true;
4770 sra_stats.deleted_unused_parameters++;
4771 }
4772 adjustments_count++;
4773 }
4774 }
4775
4776 if (!proceed && dump_file)
4777 fprintf (dump_file, "NOT proceeding to change params.\n");
4778
4779 if (proceed)
4780 adjustments = turn_representatives_into_adjustments (representatives,
4781 adjustments_count);
4782 else
4783 adjustments = ipa_parm_adjustment_vec ();
4784
4785 representatives.release ();
4786 return adjustments;
4787 }
4788
4789 /* If a parameter replacement identified by ADJ does not yet exist in the form
4790 of a declaration, create it and record it, otherwise return the previously
4791 created one. */
4792
4793 static tree
4794 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4795 {
4796 tree repl;
4797 if (!adj->new_ssa_base)
4798 {
4799 char *pretty_name = make_fancy_name (adj->base);
4800
4801 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4802 DECL_NAME (repl) = get_identifier (pretty_name);
4803 DECL_NAMELESS (repl) = 1;
4804 obstack_free (&name_obstack, pretty_name);
4805
4806 adj->new_ssa_base = repl;
4807 }
4808 else
4809 repl = adj->new_ssa_base;
4810 return repl;
4811 }
4812
4813 /* Find the first adjustment for a particular parameter BASE in a vector of
4814 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4815 adjustment. */
4816
4817 static struct ipa_parm_adjustment *
4818 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4819 {
4820 int i, len;
4821
4822 len = adjustments.length ();
4823 for (i = 0; i < len; i++)
4824 {
4825 struct ipa_parm_adjustment *adj;
4826
4827 adj = &adjustments[i];
4828 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4829 return adj;
4830 }
4831
4832 return NULL;
4833 }
4834
4835 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4836 parameter which is to be removed because its value is not used, create a new
4837 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4838 original with it and return it. If there is no need to re-map, return NULL.
4839 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4840
4841 static tree
4842 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4843 ipa_parm_adjustment_vec adjustments)
4844 {
4845 struct ipa_parm_adjustment *adj;
4846 tree decl, repl, new_name;
4847
4848 if (TREE_CODE (old_name) != SSA_NAME)
4849 return NULL;
4850
4851 decl = SSA_NAME_VAR (old_name);
4852 if (decl == NULL_TREE
4853 || TREE_CODE (decl) != PARM_DECL)
4854 return NULL;
4855
4856 adj = get_adjustment_for_base (adjustments, decl);
4857 if (!adj)
4858 return NULL;
4859
4860 repl = get_replaced_param_substitute (adj);
4861 new_name = make_ssa_name (repl, stmt);
4862 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4863 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4864
4865 if (dump_file)
4866 {
4867 fprintf (dump_file, "replacing an SSA name of a removed param ");
4868 print_generic_expr (dump_file, old_name);
4869 fprintf (dump_file, " with ");
4870 print_generic_expr (dump_file, new_name);
4871 fprintf (dump_file, "\n");
4872 }
4873
4874 replace_uses_by (old_name, new_name);
4875 return new_name;
4876 }
4877
4878 /* If the statement STMT contains any expressions that need to be replaced with
4879 different ones as noted by ADJUSTMENTS, do so. Handle any potential type
4880 incompatibilities (GSI is used to accommodate conversion statements and must
4881 point to the statement). Return true iff the statement was modified. */
4882
4883 static bool
4884 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4885 ipa_parm_adjustment_vec adjustments)
4886 {
4887 tree *lhs_p, *rhs_p;
4888 bool any;
4889
4890 if (!gimple_assign_single_p (stmt))
4891 return false;
4892
4893 rhs_p = gimple_assign_rhs1_ptr (stmt);
4894 lhs_p = gimple_assign_lhs_ptr (stmt);
4895
4896 any = ipa_modify_expr (rhs_p, false, adjustments);
4897 any |= ipa_modify_expr (lhs_p, false, adjustments);
4898 if (any)
4899 {
4900 tree new_rhs = NULL_TREE;
4901
4902 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4903 {
4904 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4905 {
4906 /* V_C_Es of constructors can cause trouble (PR 42714). */
4907 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4908 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4909 else
4910 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4911 NULL);
4912 }
4913 else
4914 new_rhs = fold_build1_loc (gimple_location (stmt),
4915 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4916 *rhs_p);
4917 }
4918 else if (REFERENCE_CLASS_P (*rhs_p)
4919 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4920 && !is_gimple_reg (*lhs_p))
4921 /* This can happen when an assignment in between two single field
4922 structures is turned into an assignment in between two pointers to
4923 scalars (PR 42237). */
4924 new_rhs = *rhs_p;
4925
4926 if (new_rhs)
4927 {
4928 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4929 true, GSI_SAME_STMT);
4930
4931 gimple_assign_set_rhs_from_tree (gsi, tmp);
4932 }
4933
4934 return true;
4935 }
4936
4937 return false;
4938 }
4939
4940 /* Traverse the function body and perform all modifications as described in
4941 ADJUSTMENTS. Return true iff the CFG has been changed. */
4942
4943 bool
4944 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4945 {
4946 bool cfg_changed = false;
4947 basic_block bb;
4948
4949 FOR_EACH_BB_FN (bb, cfun)
4950 {
4951 gimple_stmt_iterator gsi;
4952
4953 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4954 {
4955 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4956 tree new_lhs, old_lhs = gimple_phi_result (phi);
4957 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4958 if (new_lhs)
4959 {
4960 gimple_phi_set_result (phi, new_lhs);
4961 release_ssa_name (old_lhs);
4962 }
4963 }
4964
4965 gsi = gsi_start_bb (bb);
4966 while (!gsi_end_p (gsi))
4967 {
4968 gimple *stmt = gsi_stmt (gsi);
4969 bool modified = false;
4970 tree *t;
4971 unsigned i;
4972
4973 switch (gimple_code (stmt))
4974 {
4975 case GIMPLE_RETURN:
4976 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4977 if (*t != NULL_TREE)
4978 modified |= ipa_modify_expr (t, true, adjustments);
4979 break;
4980
4981 case GIMPLE_ASSIGN:
4982 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4983 break;
4984
4985 case GIMPLE_CALL:
4986 /* Operands must be processed before the lhs. */
4987 for (i = 0; i < gimple_call_num_args (stmt); i++)
4988 {
4989 t = gimple_call_arg_ptr (stmt, i);
4990 modified |= ipa_modify_expr (t, true, adjustments);
4991 }
4992
4993 if (gimple_call_lhs (stmt))
4994 {
4995 t = gimple_call_lhs_ptr (stmt);
4996 modified |= ipa_modify_expr (t, false, adjustments);
4997 }
4998 break;
4999
5000 case GIMPLE_ASM:
5001 {
5002 gasm *asm_stmt = as_a <gasm *> (stmt);
5003 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
5004 {
5005 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
5006 modified |= ipa_modify_expr (t, true, adjustments);
5007 }
5008 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
5009 {
5010 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
5011 modified |= ipa_modify_expr (t, false, adjustments);
5012 }
5013 }
5014 break;
5015
5016 default:
5017 break;
5018 }
5019
5020 def_operand_p defp;
5021 ssa_op_iter iter;
5022 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5023 {
5024 tree old_def = DEF_FROM_PTR (defp);
5025 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
5026 adjustments))
5027 {
5028 SET_DEF (defp, new_def);
5029 release_ssa_name (old_def);
5030 modified = true;
5031 }
5032 }
5033
5034 if (modified)
5035 {
5036 update_stmt (stmt);
5037 if (maybe_clean_eh_stmt (stmt)
5038 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5039 cfg_changed = true;
5040 }
5041 gsi_next (&gsi);
5042 }
5043 }
5044
5045 return cfg_changed;
5046 }
5047
5048 /* Call gimple_debug_bind_reset_value on all debug statements describing
5049 gimple register parameters that are being removed or replaced. */
5050
5051 static void
5052 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
5053 {
5054 int i, len;
5055 gimple_stmt_iterator *gsip = NULL, gsi;
5056
5057 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5058 {
5059 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5060 gsip = &gsi;
5061 }
5062 len = adjustments.length ();
5063 for (i = 0; i < len; i++)
5064 {
5065 struct ipa_parm_adjustment *adj;
5066 imm_use_iterator ui;
5067 gimple *stmt;
5068 gdebug *def_temp;
5069 tree name, vexpr, copy = NULL_TREE;
5070 use_operand_p use_p;
5071
5072 adj = &adjustments[i];
5073 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5074 continue;
5075 name = ssa_default_def (cfun, adj->base);
5076 vexpr = NULL;
5077 if (name)
5078 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5079 {
5080 if (gimple_clobber_p (stmt))
5081 {
5082 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5083 unlink_stmt_vdef (stmt);
5084 gsi_remove (&cgsi, true);
5085 release_defs (stmt);
5086 continue;
5087 }
5088 /* All other users must have been removed by
5089 ipa_sra_modify_function_body. */
5090 gcc_assert (is_gimple_debug (stmt));
5091 if (vexpr == NULL && gsip != NULL)
5092 {
5093 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5094 vexpr = make_node (DEBUG_EXPR_DECL);
5095 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5096 NULL);
5097 DECL_ARTIFICIAL (vexpr) = 1;
5098 TREE_TYPE (vexpr) = TREE_TYPE (name);
5099 SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5100 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5101 }
5102 if (vexpr)
5103 {
5104 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5105 SET_USE (use_p, vexpr);
5106 }
5107 else
5108 gimple_debug_bind_reset_value (stmt);
5109 update_stmt (stmt);
5110 }
5111 /* Create a VAR_DECL for debug info purposes. */
5112 if (!DECL_IGNORED_P (adj->base))
5113 {
5114 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5115 VAR_DECL, DECL_NAME (adj->base),
5116 TREE_TYPE (adj->base));
5117 if (DECL_PT_UID_SET_P (adj->base))
5118 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5119 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5120 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5121 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5122 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5123 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5124 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5125 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5126 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5127 SET_DECL_RTL (copy, 0);
5128 TREE_USED (copy) = 1;
5129 DECL_CONTEXT (copy) = current_function_decl;
5130 add_local_decl (cfun, copy);
5131 DECL_CHAIN (copy) =
5132 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5133 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5134 }
5135 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5136 {
5137 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5138 if (vexpr)
5139 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5140 else
5141 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5142 NULL);
5143 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5144 }
5145 }
5146 }
5147
5148 /* Return false if all callers have at least as many actual arguments as there
5149 are formal parameters in the current function and their types match;
5150 return true otherwise. */
5151
5152 static bool
5153 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5154 void *data ATTRIBUTE_UNUSED)
5155 {
5156 struct cgraph_edge *cs;
5157 for (cs = node->callers; cs; cs = cs->next_caller)
5158 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5159 return true;
5160
5161 return false;
5162 }
5163
5164 /* Return false if all callers have a VUSE attached to their call statements. */
5165
5166 static bool
5167 some_callers_have_no_vuse_p (struct cgraph_node *node,
5168 void *data ATTRIBUTE_UNUSED)
5169 {
5170 struct cgraph_edge *cs;
5171 for (cs = node->callers; cs; cs = cs->next_caller)
5172 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5173 return true;
5174
5175 return false;
5176 }
5177
5178 /* Convert all callers of NODE. */
5179
5180 static bool
5181 convert_callers_for_node (struct cgraph_node *node,
5182 void *data)
5183 {
5184 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5185 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5186 struct cgraph_edge *cs;
5187
5188 for (cs = node->callers; cs; cs = cs->next_caller)
5189 {
5190 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5191
5192 if (dump_file)
5193 fprintf (dump_file, "Adjusting call %s -> %s\n",
5194 cs->caller->dump_name (), cs->callee->dump_name ());
5195
5196 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5197
5198 pop_cfun ();
5199 }
5200
5201 for (cs = node->callers; cs; cs = cs->next_caller)
5202 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5203 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5204 compute_fn_summary (cs->caller, true);
5205 BITMAP_FREE (recomputed_callers);
5206
5207 return true;
5208 }
5209
5210 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5211
5212 static void
5213 convert_callers (struct cgraph_node *node, tree old_decl,
5214 ipa_parm_adjustment_vec adjustments)
5215 {
5216 basic_block this_block;
5217
5218 node->call_for_symbol_and_aliases (convert_callers_for_node,
5219 &adjustments, false);
5220
5221 if (!encountered_recursive_call)
5222 return;
5223
5224 FOR_EACH_BB_FN (this_block, cfun)
5225 {
5226 gimple_stmt_iterator gsi;
5227
5228 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5229 {
5230 gcall *stmt;
5231 tree call_fndecl;
5232 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5233 if (!stmt)
5234 continue;
5235 call_fndecl = gimple_call_fndecl (stmt);
5236 if (call_fndecl == old_decl)
5237 {
5238 if (dump_file)
5239 fprintf (dump_file, "Adjusting recursive call");
5240 gimple_call_set_fndecl (stmt, node->decl);
5241 ipa_modify_call_arguments (NULL, stmt, adjustments);
5242 }
5243 }
5244 }
5245
5246 return;
5247 }
5248
5249 /* Perform all the modification required in IPA-SRA for NODE to have parameters
5250 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5251
5252 static bool
5253 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5254 {
5255 struct cgraph_node *new_node;
5256 bool cfg_changed;
5257
5258 cgraph_edge::rebuild_edges ();
5259 free_dominance_info (CDI_DOMINATORS);
5260 pop_cfun ();
5261
5262 /* This must be done after rebuilding cgraph edges for node above.
5263 Otherwise any recursive calls to node that are recorded in
5264 redirect_callers will be corrupted. */
5265 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5266 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5267 NULL, false, NULL, NULL,
5268 "isra");
5269 redirect_callers.release ();
5270
5271 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5272 ipa_modify_formal_parameters (current_function_decl, adjustments);
5273 cfg_changed = ipa_sra_modify_function_body (adjustments);
5274 sra_ipa_reset_debug_stmts (adjustments);
5275 convert_callers (new_node, node->decl, adjustments);
5276 new_node->make_local ();
5277 return cfg_changed;
5278 }
5279
5280 /* Means of communication between ipa_sra_check_caller and
5281 ipa_sra_preliminary_function_checks. */
5282
5283 struct ipa_sra_check_caller_data
5284 {
5285 bool has_callers;
5286 bool bad_arg_alignment;
5287 bool has_thunk;
5288 };
5289
5290 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5291 ipa_sra_check_caller_data. Also check whether all aggregate arguments in all
5292 known calls are unit aligned and if not, set the appropriate flag in DATA
5293 too. */
5294
5295 static bool
5296 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5297 {
5298 if (!node->callers)
5299 return false;
5300
5301 struct ipa_sra_check_caller_data *iscc;
5302 iscc = (struct ipa_sra_check_caller_data *) data;
5303 iscc->has_callers = true;
5304
5305 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5306 {
5307 if (cs->caller->thunk.thunk_p)
5308 {
5309 iscc->has_thunk = true;
5310 return true;
5311 }
5312 gimple *call_stmt = cs->call_stmt;
5313 unsigned count = gimple_call_num_args (call_stmt);
5314 for (unsigned i = 0; i < count; i++)
5315 {
5316 tree arg = gimple_call_arg (call_stmt, i);
5317 if (is_gimple_reg (arg))
5318 continue;
5319
5320 tree offset;
5321 HOST_WIDE_INT bitsize, bitpos;
5322 machine_mode mode;
5323 int unsignedp, reversep, volatilep = 0;
5324 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5325 &unsignedp, &reversep, &volatilep);
5326 if (bitpos % BITS_PER_UNIT)
5327 {
5328 iscc->bad_arg_alignment = true;
5329 return true;
5330 }
5331 }
5332 }
5333
5334 return false;
5335 }
5336
5337 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5338 its attributes, return true otherwise. NODE is the cgraph node of the
5339 current function. */
5340
5341 static bool
5342 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5343 {
5344 if (!node->can_be_local_p ())
5345 {
5346 if (dump_file)
5347 fprintf (dump_file, "Function not local to this compilation unit.\n");
5348 return false;
5349 }
5350
5351 if (!node->local.can_change_signature)
5352 {
5353 if (dump_file)
5354 fprintf (dump_file, "Function can not change signature.\n");
5355 return false;
5356 }
5357
5358 if (!tree_versionable_function_p (node->decl))
5359 {
5360 if (dump_file)
5361 fprintf (dump_file, "Function is not versionable.\n");
5362 return false;
5363 }
5364
5365 if (!opt_for_fn (node->decl, optimize)
5366 || !opt_for_fn (node->decl, flag_ipa_sra))
5367 {
5368 if (dump_file)
5369 fprintf (dump_file, "Function not optimized.\n");
5370 return false;
5371 }
5372
5373 if (DECL_VIRTUAL_P (current_function_decl))
5374 {
5375 if (dump_file)
5376 fprintf (dump_file, "Function is a virtual method.\n");
5377 return false;
5378 }
5379
5380 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5381 && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5382 {
5383 if (dump_file)
5384 fprintf (dump_file, "Function too big to be made truly local.\n");
5385 return false;
5386 }
5387
5388 if (cfun->stdarg)
5389 {
5390 if (dump_file)
5391 fprintf (dump_file, "Function uses stdarg. \n");
5392 return false;
5393 }
5394
5395 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5396 return false;
5397
5398 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5399 {
5400 if (dump_file)
5401 fprintf (dump_file, "Always inline function will be inlined "
5402 "anyway. \n");
5403 return false;
5404 }
5405
5406 struct ipa_sra_check_caller_data iscc;
5407 memset (&iscc, 0, sizeof(iscc));
5408 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5409 if (!iscc.has_callers)
5410 {
5411 if (dump_file)
5412 fprintf (dump_file,
5413 "Function has no callers in this compilation unit.\n");
5414 return false;
5415 }
5416
5417 if (iscc.bad_arg_alignment)
5418 {
5419 if (dump_file)
5420 fprintf (dump_file,
5421 "A function call has an argument with non-unit alignment.\n");
5422 return false;
5423 }
5424
5425 if (iscc.has_thunk)
5426 {
5427 if (dump_file)
5428 fprintf (dump_file,
5429 "A has thunk.\n");
5430 return false;
5431 }
5432
5433 return true;
5434 }
5435
5436 /* Perform early interprocedural SRA. */
5437
5438 static unsigned int
5439 ipa_early_sra (void)
5440 {
5441 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5442 ipa_parm_adjustment_vec adjustments;
5443 int ret = 0;
5444
5445 if (!ipa_sra_preliminary_function_checks (node))
5446 return 0;
5447
5448 sra_initialize ();
5449 sra_mode = SRA_MODE_EARLY_IPA;
5450
5451 if (!find_param_candidates ())
5452 {
5453 if (dump_file)
5454 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5455 goto simple_out;
5456 }
5457
5458 if (node->call_for_symbol_and_aliases
5459 (some_callers_have_mismatched_arguments_p, NULL, true))
5460 {
5461 if (dump_file)
5462 fprintf (dump_file, "There are callers with insufficient number of "
5463 "arguments or arguments with type mismatches.\n");
5464 goto simple_out;
5465 }
5466
5467 if (node->call_for_symbol_and_aliases
5468 (some_callers_have_no_vuse_p, NULL, true))
5469 {
5470 if (dump_file)
5471 fprintf (dump_file, "There are callers with no VUSE attached "
5472 "to a call stmt.\n");
5473 goto simple_out;
5474 }
5475
5476 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5477 func_param_count
5478 * last_basic_block_for_fn (cfun));
5479 final_bbs = BITMAP_ALLOC (NULL);
5480
5481 scan_function ();
5482 if (encountered_apply_args)
5483 {
5484 if (dump_file)
5485 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5486 goto out;
5487 }
5488
5489 if (encountered_unchangable_recursive_call)
5490 {
5491 if (dump_file)
5492 fprintf (dump_file, "Function calls itself with insufficient "
5493 "number of arguments.\n");
5494 goto out;
5495 }
5496
5497 adjustments = analyze_all_param_acesses ();
5498 if (!adjustments.exists ())
5499 goto out;
5500 if (dump_file)
5501 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5502
5503 if (modify_function (node, adjustments))
5504 ret = TODO_update_ssa | TODO_cleanup_cfg;
5505 else
5506 ret = TODO_update_ssa;
5507 adjustments.release ();
5508
5509 statistics_counter_event (cfun, "Unused parameters deleted",
5510 sra_stats.deleted_unused_parameters);
5511 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5512 sra_stats.scalar_by_ref_to_by_val);
5513 statistics_counter_event (cfun, "Aggregate parameters broken up",
5514 sra_stats.aggregate_params_reduced);
5515 statistics_counter_event (cfun, "Aggregate parameter components created",
5516 sra_stats.param_reductions_created);
5517
5518 out:
5519 BITMAP_FREE (final_bbs);
5520 free (bb_dereferences);
5521 simple_out:
5522 sra_deinitialize ();
5523 return ret;
5524 }
5525
5526 namespace {
5527
5528 const pass_data pass_data_early_ipa_sra =
5529 {
5530 GIMPLE_PASS, /* type */
5531 "eipa_sra", /* name */
5532 OPTGROUP_NONE, /* optinfo_flags */
5533 TV_IPA_SRA, /* tv_id */
5534 0, /* properties_required */
5535 0, /* properties_provided */
5536 0, /* properties_destroyed */
5537 0, /* todo_flags_start */
5538 TODO_dump_symtab, /* todo_flags_finish */
5539 };
5540
5541 class pass_early_ipa_sra : public gimple_opt_pass
5542 {
5543 public:
5544 pass_early_ipa_sra (gcc::context *ctxt)
5545 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5546 {}
5547
5548 /* opt_pass methods: */
5549 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5550 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5551
5552 }; // class pass_early_ipa_sra
5553
5554 } // anon namespace
5555
5556 gimple_opt_pass *
5557 make_pass_early_ipa_sra (gcc::context *ctxt)
5558 {
5559 return new pass_early_ipa_sra (ctxt);
5560 }