1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2017 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when doing
62 so does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
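
/* As a purely illustrative sketch (this example does not come from the GCC
sources or testsuite), consider a function such as

struct point { int x; int y; };

int
foo (void)
{
struct point p;
p.x = 3;
p.y = 4;
return p.x + p.y;
}

Intraprocedural SRA replaces the uses of the members of P with independent
scalar variables, conceptually

int p$x, p$y;
p$x = 3;
p$y = 4;
return p$x + p$y;

after which the aggregate P is no longer needed and the individual values are
exposed to the scalar optimizers (e.g. constant propagation and dead code
elimination). The "$" names are only illustrative; see make_fancy_name
below. */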
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-inline.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
107
108 /* Enumeration of all aggregate reductions we can do. */
109 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
110 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
111 SRA_MODE_INTRA }; /* late intraprocedural SRA */
112
113 /* Global variable describing which aggregate reduction we are performing at
114 the moment. */
115 static enum sra_mode sra_mode;
116
117 struct assign_link;
118
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120 part). It can also represent a group of accesses that refer to exactly the
121 same fragment of an aggregate (i.e. those that have exactly the same offset
122 and size). Such representatives for a single aggregate, once determined,
123 are linked in a linked list and have the group fields set.
124
125 Moreover, when doing intraprocedural SRA, a tree is built from those
126 representatives (by the means of first_child and next_sibling pointers), in
127 which all items in a subtree are "within" the root, i.e. their offset is
128 greater than or equal to the offset of the root and their offset+size is
129 smaller than or equal to that of the root. Children of an access are sorted by offset.
130
131 Note that accesses to parts of vector and complex number types are always
132 represented by an access to the whole complex number or vector. It is the
133 duty of the modifying functions to replace them appropriately. */
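
/* As an illustration only (this example is not from the GCC sources), assuming
a 32-bit int and a variable declared as

struct outer { struct inner { int a; int b; } in; int c; } o;

whose parts are accessed as o.in, o.in.a and o.c, the group representatives
would have (offset, size) pairs of (0, 64), (0, 32) and (64, 32) respectively,
and would form two trees: one rooted at the representative of o.in with the
representative of o.in.a as its only child, and a second, childless one for
o.c. */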
134
135 struct access
136 {
137 /* Values returned by `get_ref_base_and_extent' for each component reference.
138 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
139 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
140 HOST_WIDE_INT offset;
141 HOST_WIDE_INT size;
142 tree base;
143
144 /* Expression. It is context dependent so do not use it to create new
145 expressions to access the original aggregate. See PR 42154 for a
146 testcase. */
147 tree expr;
148 /* Type. */
149 tree type;
150
151 /* The statement this access belongs to. */
152 gimple *stmt;
153
154 /* Next group representative for this aggregate. */
155 struct access *next_grp;
156
157 /* Pointer to the group representative. Pointer to itself if the struct is
158 the representative. */
159 struct access *group_representative;
160
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
164
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. In IPA-SRA this is a pointer to the next access
167 belonging to the same group (having the same representative). */
168 struct access *next_sibling;
169
170 /* Pointers to the first and last element in the linked list of assign
171 links. */
172 struct assign_link *first_link, *last_link;
173
174 /* Pointer to the next access in the work queue. */
175 struct access *next_queued;
176
177 /* Replacement variable for this access "region." Never to be accessed
178 directly, always only by the means of get_access_replacement() and only
179 when grp_to_be_replaced flag is set. */
180 tree replacement_decl;
181
182 /* Is this access an access to a non-addressable field? */
183 unsigned non_addressable : 1;
184
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
187
188 /* Is this particular access a write access? */
189 unsigned write : 1;
190
191 /* Is this access currently in the work queue? */
192 unsigned grp_queued : 1;
193
194 /* Does this group contain a write access? This flag is propagated down the
195 access tree. */
196 unsigned grp_write : 1;
197
198 /* Does this group contain a read access? This flag is propagated down the
199 access tree. */
200 unsigned grp_read : 1;
201
202 /* Does this group contain a read access that comes from an assignment
203 statement? This flag is propagated down the access tree. */
204 unsigned grp_assignment_read : 1;
205
206 /* Does this group contain a write access that comes from an assignment
207 statement? This flag is propagated down the access tree. */
208 unsigned grp_assignment_write : 1;
209
210 /* Does this group contain a read access through a scalar type? This flag is
211 not propagated in the access tree in any direction. */
212 unsigned grp_scalar_read : 1;
213
214 /* Does this group contain a write access through a scalar type? This flag
215 is not propagated in the access tree in any direction. */
216 unsigned grp_scalar_write : 1;
217
218 /* Is this access an artificial one created to scalarize some record
219 entirely? */
220 unsigned grp_total_scalarization : 1;
221
222 /* Other passes of the analysis use this bit to make function
223 analyze_access_subtree create scalar replacements for this group if
224 possible. */
225 unsigned grp_hint : 1;
226
227 /* Is the subtree rooted in this access fully covered by scalar
228 replacements? */
229 unsigned grp_covered : 1;
230
231 /* If set to true, this access and all below it in an access tree must not be
232 scalarized. */
233 unsigned grp_unscalarizable_region : 1;
234
235 /* Whether data have been written to parts of the aggregate covered by this
236 access which are not to be scalarized. This flag is propagated up in the
237 access tree. */
238 unsigned grp_unscalarized_data : 1;
239
240 /* Does this access and/or group contain a write access through a
241 BIT_FIELD_REF? */
242 unsigned grp_partial_lhs : 1;
243
244 /* Set when a scalar replacement should be created for this variable. */
245 unsigned grp_to_be_replaced : 1;
246
247 /* Set when we want a replacement for the sole purpose of having it in
248 generated debug statements. */
249 unsigned grp_to_be_debug_replaced : 1;
250
251 /* Should TREE_NO_WARNING of a replacement be set? */
252 unsigned grp_no_warning : 1;
253
254 /* Is it possible that the group refers to data which might be (directly or
255 otherwise) modified? */
256 unsigned grp_maybe_modified : 1;
257
258 /* Set when this is a representative of a pointer to scalar (i.e. by
259 reference) parameter which we consider for turning into a plain scalar
260 (i.e. a by value parameter). */
261 unsigned grp_scalar_ptr : 1;
262
263 /* Set when we discover that this pointer is not safe to dereference in the
264 caller. */
265 unsigned grp_not_necessarilly_dereferenced : 1;
266 };
267
268 typedef struct access *access_p;
269
270
271 /* Alloc pool for allocating access structures. */
272 static object_allocator<struct access> access_pool ("SRA accesses");
273
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs as long as they don't
276 conflict with what is already there. */
277 struct assign_link
278 {
279 struct access *lacc, *racc;
280 struct assign_link *next;
281 };
282
283 /* Alloc pool for allocating assign link structures. */
284 static object_allocator<assign_link> assign_link_pool ("SRA links");
285
286 /* Base (tree) -> Vector (vec<access_p> *) map. */
287 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
288
289 /* Candidate hash table helpers. */
290
291 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
292 {
293 static inline hashval_t hash (const tree_node *);
294 static inline bool equal (const tree_node *, const tree_node *);
295 };
296
297 /* Hash a tree in a uid_decl_map. */
298
299 inline hashval_t
300 uid_decl_hasher::hash (const tree_node *item)
301 {
302 return item->decl_minimal.uid;
303 }
304
305 /* Return true if the DECL_UIDs of both trees are equal. */
306
307 inline bool
308 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
309 {
310 return (a->decl_minimal.uid == b->decl_minimal.uid);
311 }
312
313 /* Set of candidates. */
314 static bitmap candidate_bitmap;
315 static hash_table<uid_decl_hasher> *candidates;
316
317 /* For a candidate UID return the candidate's decl. */
318
319 static inline tree
320 candidate (unsigned uid)
321 {
322 tree_node t;
323 t.decl_minimal.uid = uid;
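/* Only DECL_UID matters here: uid_decl_hasher hashes and compares nothing but
the UID, so a tree_node with only this field set up is sufficient for the
lookup. */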
324 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
325 }
326
327 /* Bitmap of candidates which we should try to entirely scalarize away and
328 those which cannot be (because they are, and need to be, used as a whole). */
329 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
330
331 /* Bitmap of candidates in the constant pool, which cannot be scalarized
332 because this would produce non-constant expressions (e.g. Ada). */
333 static bitmap disqualified_constants;
334
335 /* Obstack for creation of fancy names. */
336 static struct obstack name_obstack;
337
338 /* Head of a linked list of accesses that need to have their subaccesses
339 propagated to their assignment counterparts. */
340 static struct access *work_queue_head;
341
342 /* Number of parameters of the analyzed function when doing early ipa SRA. */
343 static int func_param_count;
344
345 /* scan_function sets the following to true if it encounters a call to
346 __builtin_apply_args. */
347 static bool encountered_apply_args;
348
349 /* Set by scan_function when it finds a recursive call. */
350 static bool encountered_recursive_call;
351
352 /* Set by scan_function when it finds a recursive call with fewer actual
353 arguments than formal parameters. */
354 static bool encountered_unchangable_recursive_call;
355
356 /* This is a table in which, for each basic block and parameter, there is the
357 maximum distance (offset + size) into that parameter which is dereferenced
358 and accessed in that BB. */
359 static HOST_WIDE_INT *bb_dereferences;
360 /* Bitmap of BBs that can cause the function to "stop" progressing by
361 returning, throwing externally, looping infinitely or calling a function
362 which might abort, etc. */
363 static bitmap final_bbs;
364
365 /* Representative of no accesses at all. */
366 static struct access no_accesses_representant;
367
368 /* Predicate to test the special value. */
369
370 static inline bool
371 no_accesses_p (struct access *access)
372 {
373 return access == &no_accesses_representant;
374 }
375
376 /* Statistics about the transformations performed by the pass. */
379
380 static struct
381 {
382 /* Number of processed aggregates is readily available in
383 analyze_all_variable_accesses and so is not stored here. */
384
385 /* Number of created scalar replacements. */
386 int replacements;
387
388 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
389 expression. */
390 int exprs;
391
392 /* Number of statements created by generate_subtree_copies. */
393 int subtree_copies;
394
395 /* Number of statements created by load_assign_lhs_subreplacements. */
396 int subreplacements;
397
398 /* Number of times sra_modify_assign has deleted a statement. */
399 int deleted;
400
401 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
402 RHS separately due to type conversions or nonexistent matching
403 references. */
404 int separate_lhs_rhs_handling;
405
406 /* Number of parameters that were removed because they were unused. */
407 int deleted_unused_parameters;
408
409 /* Number of scalars passed as parameters by reference that have been
410 converted to be passed by value. */
411 int scalar_by_ref_to_by_val;
412
413 /* Number of aggregate parameters that were replaced by one or more of their
414 components. */
415 int aggregate_params_reduced;
416
417 /* Number of components created when splitting aggregate parameters. */
418 int param_reductions_created;
419 } sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */
420
421 static void
422 dump_access (FILE *f, struct access *access, bool grp)
423 {
424 fprintf (f, "access { ");
425 fprintf (f, "base = (%d)'", DECL_UID (access->base));
426 print_generic_expr (f, access->base, 0);
427 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
428 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
429 fprintf (f, ", expr = ");
430 print_generic_expr (f, access->expr, 0);
431 fprintf (f, ", type = ");
432 print_generic_expr (f, access->type, 0);
433 fprintf (f, ", non_addressable = %d, reverse = %d",
434 access->non_addressable, access->reverse);
435 if (grp)
436 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
437 "grp_assignment_write = %d, grp_scalar_read = %d, "
438 "grp_scalar_write = %d, grp_total_scalarization = %d, "
439 "grp_hint = %d, grp_covered = %d, "
440 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
441 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
442 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
443 "grp_not_necessarilly_dereferenced = %d\n",
444 access->grp_read, access->grp_write, access->grp_assignment_read,
445 access->grp_assignment_write, access->grp_scalar_read,
446 access->grp_scalar_write, access->grp_total_scalarization,
447 access->grp_hint, access->grp_covered,
448 access->grp_unscalarizable_region, access->grp_unscalarized_data,
449 access->grp_partial_lhs, access->grp_to_be_replaced,
450 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
451 access->grp_not_necessarilly_dereferenced);
452 else
453 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
454 "grp_partial_lhs = %d\n",
455 access->write, access->grp_total_scalarization,
456 access->grp_partial_lhs);
457 }
458
459 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
460
461 static void
462 dump_access_tree_1 (FILE *f, struct access *access, int level)
463 {
464 do
465 {
466 int i;
467
468 for (i = 0; i < level; i++)
469 fputs ("* ", dump_file);
470
471 dump_access (f, access, true);
472
473 if (access->first_child)
474 dump_access_tree_1 (f, access->first_child, level + 1);
475
476 access = access->next_sibling;
477 }
478 while (access);
479 }
480
481 /* Dump all access trees for a variable, given the pointer to the first root in
482 ACCESS. */
483
484 static void
485 dump_access_tree (FILE *f, struct access *access)
486 {
487 for (; access; access = access->next_grp)
488 dump_access_tree_1 (f, access, 0);
489 }
490
491 /* Return true iff ACC is non-NULL and has subaccesses. */
492
493 static inline bool
494 access_has_children_p (struct access *acc)
495 {
496 return acc && acc->first_child;
497 }
498
499 /* Return true iff ACC is (partly) covered by at least one replacement. */
500
501 static bool
502 access_has_replacements_p (struct access *acc)
503 {
504 struct access *child;
505 if (acc->grp_to_be_replaced)
506 return true;
507 for (child = acc->first_child; child; child = child->next_sibling)
508 if (access_has_replacements_p (child))
509 return true;
510 return false;
511 }
512
513 /* Return a vector of pointers to accesses for the variable given in BASE or
514 NULL if there is none. */
515
516 static vec<access_p> *
517 get_base_access_vector (tree base)
518 {
519 return base_access_vec->get (base);
520 }
521
522 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
523 in ACCESS. Return NULL if it cannot be found. */
524
525 static struct access *
526 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
527 HOST_WIDE_INT size)
528 {
529 while (access && (access->offset != offset || access->size != size))
530 {
531 struct access *child = access->first_child;
532
533 while (child && (child->offset + child->size <= offset))
534 child = child->next_sibling;
535 access = child;
536 }
537
538 return access;
539 }
540
541 /* Return the first group representative for BASE or NULL if none exists. */
542
543 static struct access *
544 get_first_repr_for_decl (tree base)
545 {
546 vec<access_p> *access_vec;
547
548 access_vec = get_base_access_vector (base);
549 if (!access_vec)
550 return NULL;
551
552 return (*access_vec)[0];
553 }
554
555 /* Find an access representative for the variable BASE and given OFFSET and
556 SIZE. Requires that access trees have already been built. Return NULL if
557 it cannot be found. */
558
559 static struct access *
560 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
561 HOST_WIDE_INT size)
562 {
563 struct access *access;
564
565 access = get_first_repr_for_decl (base);
566 while (access && (access->offset + access->size <= offset))
567 access = access->next_grp;
568 if (!access)
569 return NULL;
570
571 return find_access_in_subtree (access, offset, size);
572 }
573
574 /* Add LINK to the linked list of assign links of RACC. */
575 static void
576 add_link_to_rhs (struct access *racc, struct assign_link *link)
577 {
578 gcc_assert (link->racc == racc);
579
580 if (!racc->first_link)
581 {
582 gcc_assert (!racc->last_link);
583 racc->first_link = link;
584 }
585 else
586 racc->last_link->next = link;
587
588 racc->last_link = link;
589 link->next = NULL;
590 }
591
592 /* Move all link structures from the linked list in OLD_RACC to the linked list
593 in NEW_RACC. */
594 static void
595 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
596 {
597 if (!old_racc->first_link)
598 {
599 gcc_assert (!old_racc->last_link);
600 return;
601 }
602
603 if (new_racc->first_link)
604 {
605 gcc_assert (!new_racc->last_link->next);
606 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
607
608 new_racc->last_link->next = old_racc->first_link;
609 new_racc->last_link = old_racc->last_link;
610 }
611 else
612 {
613 gcc_assert (!new_racc->last_link);
614
615 new_racc->first_link = old_racc->first_link;
616 new_racc->last_link = old_racc->last_link;
617 }
618 old_racc->first_link = old_racc->last_link = NULL;
619 }
620
621 /* Add ACCESS to the work queue (which is actually a stack). */
622
623 static void
624 add_access_to_work_queue (struct access *access)
625 {
626 if (!access->grp_queued)
627 {
628 gcc_assert (!access->next_queued);
629 access->next_queued = work_queue_head;
630 access->grp_queued = 1;
631 work_queue_head = access;
632 }
633 }
634
635 /* Pop an access from the work queue, and return it, assuming there is one. */
636
637 static struct access *
638 pop_access_from_work_queue (void)
639 {
640 struct access *access = work_queue_head;
641
642 work_queue_head = access->next_queued;
643 access->next_queued = NULL;
644 access->grp_queued = 0;
645 return access;
646 }
647
648
649 /* Allocate necessary structures. */
650
651 static void
652 sra_initialize (void)
653 {
654 candidate_bitmap = BITMAP_ALLOC (NULL);
655 candidates = new hash_table<uid_decl_hasher>
656 (vec_safe_length (cfun->local_decls) / 2);
657 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
658 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
659 disqualified_constants = BITMAP_ALLOC (NULL);
660 gcc_obstack_init (&name_obstack);
661 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
662 memset (&sra_stats, 0, sizeof (sra_stats));
663 encountered_apply_args = false;
664 encountered_recursive_call = false;
665 encountered_unchangable_recursive_call = false;
666 }
667
668 /* Deallocate all general structures. */
669
670 static void
671 sra_deinitialize (void)
672 {
673 BITMAP_FREE (candidate_bitmap);
674 delete candidates;
675 candidates = NULL;
676 BITMAP_FREE (should_scalarize_away_bitmap);
677 BITMAP_FREE (cannot_scalarize_away_bitmap);
678 BITMAP_FREE (disqualified_constants);
679 access_pool.release ();
680 assign_link_pool.release ();
681 obstack_free (&name_obstack, NULL);
682
683 delete base_access_vec;
684 }
685
686 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
687
688 static bool constant_decl_p (tree decl)
689 {
690 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
691 }
692
693 /* Remove DECL from candidates for SRA and write REASON to the dump file if
694 there is one. */
695 static void
696 disqualify_candidate (tree decl, const char *reason)
697 {
698 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
699 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
700 if (constant_decl_p (decl))
701 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
702
703 if (dump_file && (dump_flags & TDF_DETAILS))
704 {
705 fprintf (dump_file, "! Disqualifying ");
706 print_generic_expr (dump_file, decl, 0);
707 fprintf (dump_file, " - %s\n", reason);
708 }
709 }
710
711 /* Return true iff the type contains a field or an element which does not allow
712 scalarization. */
713
714 static bool
715 type_internals_preclude_sra_p (tree type, const char **msg)
716 {
717 tree fld;
718 tree et;
719
720 switch (TREE_CODE (type))
721 {
722 case RECORD_TYPE:
723 case UNION_TYPE:
724 case QUAL_UNION_TYPE:
725 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
726 if (TREE_CODE (fld) == FIELD_DECL)
727 {
728 tree ft = TREE_TYPE (fld);
729
730 if (TREE_THIS_VOLATILE (fld))
731 {
732 *msg = "volatile structure field";
733 return true;
734 }
735 if (!DECL_FIELD_OFFSET (fld))
736 {
737 *msg = "no structure field offset";
738 return true;
739 }
740 if (!DECL_SIZE (fld))
741 {
742 *msg = "zero structure field size";
743 return true;
744 }
745 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
746 {
747 *msg = "structure field offset not fixed";
748 return true;
749 }
750 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
751 {
752 *msg = "structure field size not fixed";
753 return true;
754 }
755 if (!tree_fits_shwi_p (bit_position (fld)))
756 {
757 *msg = "structure field size too big";
758 return true;
759 }
760 if (AGGREGATE_TYPE_P (ft)
761 && int_bit_position (fld) % BITS_PER_UNIT != 0)
762 {
763 *msg = "structure field is bit field";
764 return true;
765 }
766
767 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
768 return true;
769 }
770
771 return false;
772
773 case ARRAY_TYPE:
774 et = TREE_TYPE (type);
775
776 if (TYPE_VOLATILE (et))
777 {
778 *msg = "element type is volatile";
779 return true;
780 }
781
782 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
783 return true;
784
785 return false;
786
787 default:
788 return false;
789 }
790 }
791
792 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
793 base variable if it is. Return T if it is not an SSA_NAME. */
794
795 static tree
796 get_ssa_base_param (tree t)
797 {
798 if (TREE_CODE (t) == SSA_NAME)
799 {
800 if (SSA_NAME_IS_DEFAULT_DEF (t))
801 return SSA_NAME_VAR (t);
802 else
803 return NULL_TREE;
804 }
805 return t;
806 }
807
808 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
809 belongs to, unless the BB has already been marked as potentially
810 final. */
811
812 static void
813 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
814 {
815 basic_block bb = gimple_bb (stmt);
816 int idx, parm_index = 0;
817 tree parm;
818
819 if (bitmap_bit_p (final_bbs, bb->index))
820 return;
821
822 for (parm = DECL_ARGUMENTS (current_function_decl);
823 parm && parm != base;
824 parm = DECL_CHAIN (parm))
825 parm_index++;
826
827 gcc_assert (parm_index < func_param_count);
828
829 idx = bb->index * func_param_count + parm_index;
830 if (bb_dereferences[idx] < dist)
831 bb_dereferences[idx] = dist;
832 }
833
834 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
835 the three fields. Also add it to the vector of accesses corresponding to
836 the base. Finally, return the new access. */
837
838 static struct access *
839 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
840 {
841 struct access *access = access_pool.allocate ();
842
843 memset (access, 0, sizeof (struct access));
844 access->base = base;
845 access->offset = offset;
846 access->size = size;
847
848 base_access_vec->get_or_insert (base).safe_push (access);
849
850 return access;
851 }
852
853 static bool maybe_add_sra_candidate (tree);
854
855 /* Create and insert an access for EXPR. Return the created access, or NULL
856 if that is not possible. Also scan for uses of the constant pool as we go
857 along and add them to the candidates. */
858
859 static struct access *
860 create_access (tree expr, gimple *stmt, bool write)
861 {
862 struct access *access;
863 HOST_WIDE_INT offset, size, max_size;
864 tree base = expr;
865 bool reverse, ptr, unscalarizable_region = false;
866
867 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
868
869 if (sra_mode == SRA_MODE_EARLY_IPA
870 && TREE_CODE (base) == MEM_REF)
871 {
872 base = get_ssa_base_param (TREE_OPERAND (base, 0));
873 if (!base)
874 return NULL;
875 ptr = true;
876 }
877 else
878 ptr = false;
879
880 /* For constant-pool entries, check we can substitute the constant value. */
881 if (constant_decl_p (base)
882 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
883 {
884 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
885 if (expr != base
886 && !is_gimple_reg_type (TREE_TYPE (expr))
887 && dump_file && (dump_flags & TDF_DETAILS))
888 {
889 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
890 and elements of multidimensional arrays (which are
891 multi-element arrays in their own right). */
892 fprintf (dump_file, "Allowing non-reg-type load of part"
893 " of constant-pool entry: ");
894 print_generic_expr (dump_file, expr, 0);
895 }
896 maybe_add_sra_candidate (base);
897 }
898
899 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
900 return NULL;
901
902 if (sra_mode == SRA_MODE_EARLY_IPA)
903 {
904 if (size < 0 || size != max_size)
905 {
906 disqualify_candidate (base, "Encountered a variable sized access.");
907 return NULL;
908 }
909 if (TREE_CODE (expr) == COMPONENT_REF
910 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
911 {
912 disqualify_candidate (base, "Encountered a bit-field access.");
913 return NULL;
914 }
915 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
916
917 if (ptr)
918 mark_parm_dereference (base, offset + size, stmt);
919 }
920 else
921 {
922 if (size != max_size)
923 {
924 size = max_size;
925 unscalarizable_region = true;
926 }
927 if (size < 0)
928 {
929 disqualify_candidate (base, "Encountered an unconstrained access.");
930 return NULL;
931 }
932 }
933
934 access = create_access_1 (base, offset, size);
935 access->expr = expr;
936 access->type = TREE_TYPE (expr);
937 access->write = write;
938 access->grp_unscalarizable_region = unscalarizable_region;
939 access->stmt = stmt;
940 access->reverse = reverse;
941
942 if (TREE_CODE (expr) == COMPONENT_REF
943 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
944 access->non_addressable = 1;
945
946 return access;
947 }
948
949
950 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
951 ARRAY_TYPE with fields that are either of gimple register types (excluding
952 bit-fields) or (recursively) scalarizable types. */
953
954 static bool
955 scalarizable_type_p (tree type)
956 {
957 gcc_assert (!is_gimple_reg_type (type));
958 if (type_contains_placeholder_p (type))
959 return false;
960
961 switch (TREE_CODE (type))
962 {
963 case RECORD_TYPE:
964 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
965 if (TREE_CODE (fld) == FIELD_DECL)
966 {
967 tree ft = TREE_TYPE (fld);
968
969 if (DECL_BIT_FIELD (fld))
970 return false;
971
972 if (!is_gimple_reg_type (ft)
973 && !scalarizable_type_p (ft))
974 return false;
975 }
976
977 return true;
978
979 case ARRAY_TYPE:
980 {
981 if (TYPE_DOMAIN (type) == NULL_TREE
982 || !tree_fits_shwi_p (TYPE_SIZE (type))
983 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
984 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
985 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
986 return false;
987 if (tree_to_shwi (TYPE_SIZE (type)) == 0
988 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
989 /* Zero-element array, should not prevent scalarization. */
990 ;
991 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
992 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
993 /* Variable-length array, do not allow scalarization. */
994 return false;
995
996 tree elem = TREE_TYPE (type);
997 if (!is_gimple_reg_type (elem)
998 && !scalarizable_type_p (elem))
999 return false;
1000 return true;
1001 }
1002 default:
1003 return false;
1004 }
1005 }
1006
1007 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1008
1009 /* Create total_scalarization accesses for all scalar fields of a member
1010 of type DECL_TYPE conforming to scalarizable_type_p. BASE
1011 must be the top-most VAR_DECL representing the variable; within that,
1012 OFFSET locates the member and REF must be the memory reference expression for
1013 the member. */
1014
1015 static void
1016 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1017 {
1018 switch (TREE_CODE (decl_type))
1019 {
1020 case RECORD_TYPE:
1021 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1022 if (TREE_CODE (fld) == FIELD_DECL)
1023 {
1024 HOST_WIDE_INT pos = offset + int_bit_position (fld);
1025 tree ft = TREE_TYPE (fld);
1026 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1027
1028 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1029 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1030 nref, ft);
1031 }
1032 break;
1033 case ARRAY_TYPE:
1034 {
1035 tree elemtype = TREE_TYPE (decl_type);
1036 tree elem_size = TYPE_SIZE (elemtype);
1037 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1038 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1039 gcc_assert (el_size > 0);
1040
1041 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1042 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1043 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1044 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1045 if (maxidx)
1046 {
1047 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1048 tree domain = TYPE_DOMAIN (decl_type);
1049 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1050 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1051 offset_int idx = wi::to_offset (minidx);
1052 offset_int max = wi::to_offset (maxidx);
1053 if (!TYPE_UNSIGNED (domain))
1054 {
1055 idx = wi::sext (idx, TYPE_PRECISION (domain));
1056 max = wi::sext (max, TYPE_PRECISION (domain));
1057 }
1058 for (int el_off = offset; idx <= max; ++idx)
1059 {
1060 tree nref = build4 (ARRAY_REF, elemtype,
1061 ref,
1062 wide_int_to_tree (domain, idx),
1063 NULL_TREE, NULL_TREE);
1064 scalarize_elem (base, el_off, el_size,
1065 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1066 nref, elemtype);
1067 el_off += el_size;
1068 }
1069 }
1070 }
1071 break;
1072 default:
1073 gcc_unreachable ();
1074 }
1075 }
1076
1077 /* Create total_scalarization accesses for a member of type TYPE, which must
1078 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1079 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1080 the member, REVERSE gives its storage order, and REF must be the reference
1081 expression for it. */
1082
1083 static void
1084 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1085 tree ref, tree type)
1086 {
1087 if (is_gimple_reg_type (type))
1088 {
1089 struct access *access = create_access_1 (base, pos, size);
1090 access->expr = ref;
1091 access->type = type;
1092 access->grp_total_scalarization = 1;
1093 access->reverse = reverse;
1094 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1095 }
1096 else
1097 completely_scalarize (base, type, pos, ref);
1098 }
1099
1100 /* Create a total_scalarization access for VAR as a whole. VAR must be of a
1101 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1102
1103 static void
1104 create_total_scalarization_access (tree var)
1105 {
1106 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1107 struct access *access;
1108
1109 access = create_access_1 (var, 0, size);
1110 access->expr = var;
1111 access->type = TREE_TYPE (var);
1112 access->grp_total_scalarization = 1;
1113 }
1114
1115 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1116
1117 static inline bool
1118 contains_view_convert_expr_p (const_tree ref)
1119 {
1120 while (handled_component_p (ref))
1121 {
1122 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1123 return true;
1124 ref = TREE_OPERAND (ref, 0);
1125 }
1126
1127 return false;
1128 }
1129
1130 /* Search the given tree for a declaration by skipping handled components and
1131 exclude it from the candidates. */
1132
1133 static void
1134 disqualify_base_of_expr (tree t, const char *reason)
1135 {
1136 t = get_base_address (t);
1137 if (sra_mode == SRA_MODE_EARLY_IPA
1138 && TREE_CODE (t) == MEM_REF)
1139 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1140
1141 if (t && DECL_P (t))
1142 disqualify_candidate (t, reason);
1143 }
1144
1145 /* Scan expression EXPR and create access structures for all accesses to
1146 candidates for scalarization. Return the created access or NULL if none is
1147 created. */
1148
1149 static struct access *
1150 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1151 {
1152 struct access *ret = NULL;
1153 bool partial_ref;
1154
1155 if (TREE_CODE (expr) == BIT_FIELD_REF
1156 || TREE_CODE (expr) == IMAGPART_EXPR
1157 || TREE_CODE (expr) == REALPART_EXPR)
1158 {
1159 expr = TREE_OPERAND (expr, 0);
1160 partial_ref = true;
1161 }
1162 else
1163 partial_ref = false;
1164
1165 /* We need to dive through V_C_Es in order to get the size of its parameter
1166 and not the result type. Ada produces such statements. We are also
1167 capable of handling the topmost V_C_E but not any of those buried in other
1168 handled components. */
1169 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1170 expr = TREE_OPERAND (expr, 0);
1171
1172 if (contains_view_convert_expr_p (expr))
1173 {
1174 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1175 "component.");
1176 return NULL;
1177 }
1178 if (TREE_THIS_VOLATILE (expr))
1179 {
1180 disqualify_base_of_expr (expr, "part of a volatile reference.");
1181 return NULL;
1182 }
1183
1184 switch (TREE_CODE (expr))
1185 {
1186 case MEM_REF:
1187 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1188 && sra_mode != SRA_MODE_EARLY_IPA)
1189 return NULL;
1190 /* fall through */
1191 case VAR_DECL:
1192 case PARM_DECL:
1193 case RESULT_DECL:
1194 case COMPONENT_REF:
1195 case ARRAY_REF:
1196 case ARRAY_RANGE_REF:
1197 ret = create_access (expr, stmt, write);
1198 break;
1199
1200 default:
1201 break;
1202 }
1203
1204 if (write && partial_ref && ret)
1205 ret->grp_partial_lhs = 1;
1206
1207 return ret;
1208 }
1209
1210 /* Scan expression EXPR and create access structures for all accesses to
1211 candidates for scalarization. Return true if any access has been inserted.
1212 STMT must be the statement from which the expression is taken, WRITE must be
1213 true if the expression is a store and false otherwise. */
1214
1215 static bool
1216 build_access_from_expr (tree expr, gimple *stmt, bool write)
1217 {
1218 struct access *access;
1219
1220 access = build_access_from_expr_1 (expr, stmt, write);
1221 if (access)
1222 {
1223 /* This means the aggregate is accessed as a whole in a way other than an
1224 assign statement and thus cannot be removed even if we had a scalar
1225 replacement for everything. */
1226 if (cannot_scalarize_away_bitmap)
1227 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1228 return true;
1229 }
1230 return false;
1231 }
1232
1233 /* Return the single non-EH successor edge of BB or NULL if there is none or
1234 more than one. */
1235
1236 static edge
1237 single_non_eh_succ (basic_block bb)
1238 {
1239 edge e, res = NULL;
1240 edge_iterator ei;
1241
1242 FOR_EACH_EDGE (e, ei, bb->succs)
1243 if (!(e->flags & EDGE_EH))
1244 {
1245 if (res)
1246 return NULL;
1247 res = e;
1248 }
1249
1250 return res;
1251 }
1252
1253 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1254 there is no alternative spot in which to put statements SRA might need to
1255 generate after it. The spot we are looking for is an edge leading to a
1256 single non-EH successor, if it exists and is indeed single. RHS may be
1257 NULL, in which case it is ignored. */
1258
1259 static bool
1260 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1261 {
1262 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1263 && stmt_ends_bb_p (stmt))
1264 {
1265 if (single_non_eh_succ (gimple_bb (stmt)))
1266 return false;
1267
1268 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1269 if (rhs)
1270 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1271 return true;
1272 }
1273 return false;
1274 }
1275
1276 /* Scan expressions occurring in STMT, create access structures for all accesses
1277 to candidates for scalarization and remove those candidates which occur in
1278 statements or expressions that prevent them from being split apart. Return
1279 true if any access has been inserted. */
1280
1281 static bool
1282 build_accesses_from_assign (gimple *stmt)
1283 {
1284 tree lhs, rhs;
1285 struct access *lacc, *racc;
1286
1287 if (!gimple_assign_single_p (stmt)
1288 /* Scope clobbers don't influence scalarization. */
1289 || gimple_clobber_p (stmt))
1290 return false;
1291
1292 lhs = gimple_assign_lhs (stmt);
1293 rhs = gimple_assign_rhs1 (stmt);
1294
1295 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1296 return false;
1297
1298 racc = build_access_from_expr_1 (rhs, stmt, false);
1299 lacc = build_access_from_expr_1 (lhs, stmt, true);
1300
1301 if (lacc)
1302 {
1303 lacc->grp_assignment_write = 1;
1304 if (storage_order_barrier_p (rhs))
1305 lacc->grp_unscalarizable_region = 1;
1306 }
1307
1308 if (racc)
1309 {
1310 racc->grp_assignment_read = 1;
1311 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1312 && !is_gimple_reg_type (racc->type))
1313 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1314 if (storage_order_barrier_p (lhs))
1315 racc->grp_unscalarizable_region = 1;
1316 }
1317
1318 if (lacc && racc
1319 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1320 && !lacc->grp_unscalarizable_region
1321 && !racc->grp_unscalarizable_region
1322 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1323 && lacc->size == racc->size
1324 && useless_type_conversion_p (lacc->type, racc->type))
1325 {
1326 struct assign_link *link;
1327
1328 link = assign_link_pool.allocate ();
1329 memset (link, 0, sizeof (struct assign_link));
1330
1331 link->lacc = lacc;
1332 link->racc = racc;
1333
1334 add_link_to_rhs (racc, link);
1335 }
1336
1337 return lacc || racc;
1338 }
1339
1340 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1341 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1342
1343 static bool
1344 asm_visit_addr (gimple *, tree op, tree, void *)
1345 {
1346 op = get_base_address (op);
1347 if (op
1348 && DECL_P (op))
1349 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1350
1351 return false;
1352 }
1353
1354 /* Return true iff callsite CALL has at least as many actual arguments as there
1355 are formal parameters of the function currently processed by IPA-SRA and
1356 their types match. */
1357
1358 static inline bool
1359 callsite_arguments_match_p (gimple *call)
1360 {
1361 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1362 return false;
1363
1364 tree parm;
1365 int i;
1366 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1367 parm;
1368 parm = DECL_CHAIN (parm), i++)
1369 {
1370 tree arg = gimple_call_arg (call, i);
1371 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1372 return false;
1373 }
1374 return true;
1375 }
1376
1377 /* Scan function and look for interesting expressions and create access
1378 structures for them. Return true iff any access is created. */
1379
1380 static bool
1381 scan_function (void)
1382 {
1383 basic_block bb;
1384 bool ret = false;
1385
1386 FOR_EACH_BB_FN (bb, cfun)
1387 {
1388 gimple_stmt_iterator gsi;
1389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1390 {
1391 gimple *stmt = gsi_stmt (gsi);
1392 tree t;
1393 unsigned i;
1394
1395 if (final_bbs && stmt_can_throw_external (stmt))
1396 bitmap_set_bit (final_bbs, bb->index);
1397 switch (gimple_code (stmt))
1398 {
1399 case GIMPLE_RETURN:
1400 t = gimple_return_retval (as_a <greturn *> (stmt));
1401 if (t != NULL_TREE)
1402 ret |= build_access_from_expr (t, stmt, false);
1403 if (final_bbs)
1404 bitmap_set_bit (final_bbs, bb->index);
1405 break;
1406
1407 case GIMPLE_ASSIGN:
1408 ret |= build_accesses_from_assign (stmt);
1409 break;
1410
1411 case GIMPLE_CALL:
1412 for (i = 0; i < gimple_call_num_args (stmt); i++)
1413 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1414 stmt, false);
1415
1416 if (sra_mode == SRA_MODE_EARLY_IPA)
1417 {
1418 tree dest = gimple_call_fndecl (stmt);
1419 int flags = gimple_call_flags (stmt);
1420
1421 if (dest)
1422 {
1423 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1424 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1425 encountered_apply_args = true;
1426 if (recursive_call_p (current_function_decl, dest))
1427 {
1428 encountered_recursive_call = true;
1429 if (!callsite_arguments_match_p (stmt))
1430 encountered_unchangable_recursive_call = true;
1431 }
1432 }
1433
1434 if (final_bbs
1435 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1436 bitmap_set_bit (final_bbs, bb->index);
1437 }
1438
1439 t = gimple_call_lhs (stmt);
1440 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1441 ret |= build_access_from_expr (t, stmt, true);
1442 break;
1443
1444 case GIMPLE_ASM:
1445 {
1446 gasm *asm_stmt = as_a <gasm *> (stmt);
1447 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1448 asm_visit_addr);
1449 if (final_bbs)
1450 bitmap_set_bit (final_bbs, bb->index);
1451
1452 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1453 {
1454 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1455 ret |= build_access_from_expr (t, asm_stmt, false);
1456 }
1457 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1458 {
1459 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1460 ret |= build_access_from_expr (t, asm_stmt, true);
1461 }
1462 }
1463 break;
1464
1465 default:
1466 break;
1467 }
1468 }
1469 }
1470
1471 return ret;
1472 }
1473
1474 /* Comparison function for qsort. The array contains pointers to accesses. An
1475 access is considered smaller than another if it has a smaller offset or if the
1476 offsets are the same but its size is bigger. */
1477
1478 static int
1479 compare_access_positions (const void *a, const void *b)
1480 {
1481 const access_p *fp1 = (const access_p *) a;
1482 const access_p *fp2 = (const access_p *) b;
1483 const access_p f1 = *fp1;
1484 const access_p f2 = *fp2;
1485
1486 if (f1->offset != f2->offset)
1487 return f1->offset < f2->offset ? -1 : 1;
1488
1489 if (f1->size == f2->size)
1490 {
1491 if (f1->type == f2->type)
1492 return 0;
1493 /* Put any non-aggregate type before any aggregate type. */
1494 else if (!is_gimple_reg_type (f1->type)
1495 && is_gimple_reg_type (f2->type))
1496 return 1;
1497 else if (is_gimple_reg_type (f1->type)
1498 && !is_gimple_reg_type (f2->type))
1499 return -1;
1500 /* Put any complex or vector type before any other scalar type. */
1501 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1502 && TREE_CODE (f1->type) != VECTOR_TYPE
1503 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1504 || TREE_CODE (f2->type) == VECTOR_TYPE))
1505 return 1;
1506 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1507 || TREE_CODE (f1->type) == VECTOR_TYPE)
1508 && TREE_CODE (f2->type) != COMPLEX_TYPE
1509 && TREE_CODE (f2->type) != VECTOR_TYPE)
1510 return -1;
1511 /* Put the integral type with the bigger precision first. */
1512 else if (INTEGRAL_TYPE_P (f1->type)
1513 && INTEGRAL_TYPE_P (f2->type))
1514 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1515 /* Put any integral type with non-full precision last. */
1516 else if (INTEGRAL_TYPE_P (f1->type)
1517 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1518 != TYPE_PRECISION (f1->type)))
1519 return 1;
1520 else if (INTEGRAL_TYPE_P (f2->type)
1521 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1522 != TYPE_PRECISION (f2->type)))
1523 return -1;
1524 /* Stabilize the sort. */
1525 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1526 }
1527
1528 /* We want the bigger accesses first, thus the opposite operator in the next
1529 line: */
1530 return f1->size > f2->size ? -1 : 1;
1531 }
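
/* For illustration (not an example from the sources): with the ordering above,
accesses with (offset, size) pairs (0, 32), (32, 32) and (0, 64) are sorted as
(0, 64), (0, 32), (32, 32), i.e. the whole 64-bit region first, then its two
halves in offset order. */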
1532
1533
1534 /* Append a name of the declaration to the name obstack. A helper function for
1535 make_fancy_name. */
1536
1537 static void
1538 make_fancy_decl_name (tree decl)
1539 {
1540 char buffer[32];
1541
1542 tree name = DECL_NAME (decl);
1543 if (name)
1544 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1545 IDENTIFIER_LENGTH (name));
1546 else
1547 {
1548 sprintf (buffer, "D%u", DECL_UID (decl));
1549 obstack_grow (&name_obstack, buffer, strlen (buffer));
1550 }
1551 }
1552
1553 /* Helper for make_fancy_name. */
1554
1555 static void
1556 make_fancy_name_1 (tree expr)
1557 {
1558 char buffer[32];
1559 tree index;
1560
1561 if (DECL_P (expr))
1562 {
1563 make_fancy_decl_name (expr);
1564 return;
1565 }
1566
1567 switch (TREE_CODE (expr))
1568 {
1569 case COMPONENT_REF:
1570 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1571 obstack_1grow (&name_obstack, '$');
1572 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1573 break;
1574
1575 case ARRAY_REF:
1576 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1577 obstack_1grow (&name_obstack, '$');
1578 /* Arrays with only one element may not have a constant as their
1579 index. */
1580 index = TREE_OPERAND (expr, 1);
1581 if (TREE_CODE (index) != INTEGER_CST)
1582 break;
1583 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1584 obstack_grow (&name_obstack, buffer, strlen (buffer));
1585 break;
1586
1587 case ADDR_EXPR:
1588 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1589 break;
1590
1591 case MEM_REF:
1592 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1593 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1594 {
1595 obstack_1grow (&name_obstack, '$');
1596 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1597 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1598 obstack_grow (&name_obstack, buffer, strlen (buffer));
1599 }
1600 break;
1601
1602 case BIT_FIELD_REF:
1603 case REALPART_EXPR:
1604 case IMAGPART_EXPR:
1605 gcc_unreachable (); /* we treat these as scalars. */
1606 break;
1607 default:
1608 break;
1609 }
1610 }
1611
1612 /* Create a human readable name for a replacement variable based on EXPR. */
1613
1614 static char *
1615 make_fancy_name (tree expr)
1616 {
1617 make_fancy_name_1 (expr);
1618 obstack_1grow (&name_obstack, '\0');
1619 return XOBFINISH (&name_obstack, char *);
1620 }
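
/* For instance (purely illustrative, not an example from the sources), for an
expression such as foo.bar[3].baz the functions above build the string
"foo$bar$3$baz", which can then serve as a human readable basis for the name
of the replacement variable. */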
1621
1622 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1623 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1624 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1625 be non-NULL and is used to insert new statements either before or below
1626 the current one as specified by INSERT_AFTER. This function is not capable
1627 of handling bitfields. */
1628
1629 tree
1630 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1631 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1632 bool insert_after)
1633 {
1634 tree prev_base = base;
1635 tree off;
1636 tree mem_ref;
1637 HOST_WIDE_INT base_offset;
1638 unsigned HOST_WIDE_INT misalign;
1639 unsigned int align;
1640
1641 /* Preserve address-space information. */
1642 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1643 if (as != TYPE_ADDR_SPACE (exp_type))
1644 exp_type = build_qualified_type (exp_type,
1645 TYPE_QUALS (exp_type)
1646 | ENCODE_QUAL_ADDR_SPACE (as));
1647
1648 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1649 get_object_alignment_1 (base, &align, &misalign);
1650 base = get_addr_base_and_unit_offset (base, &base_offset);
1651
1652 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1653 offset such as array[var_index]. */
1654 if (!base)
1655 {
1656 gassign *stmt;
1657 tree tmp, addr;
1658
1659 gcc_checking_assert (gsi);
1660 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1661 addr = build_fold_addr_expr (unshare_expr (prev_base));
1662 STRIP_USELESS_TYPE_CONVERSION (addr);
1663 stmt = gimple_build_assign (tmp, addr);
1664 gimple_set_location (stmt, loc);
1665 if (insert_after)
1666 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1667 else
1668 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1669
1670 off = build_int_cst (reference_alias_ptr_type (prev_base),
1671 offset / BITS_PER_UNIT);
1672 base = tmp;
1673 }
1674 else if (TREE_CODE (base) == MEM_REF)
1675 {
1676 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1677 base_offset + offset / BITS_PER_UNIT);
1678 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1679 base = unshare_expr (TREE_OPERAND (base, 0));
1680 }
1681 else
1682 {
1683 off = build_int_cst (reference_alias_ptr_type (prev_base),
1684 base_offset + offset / BITS_PER_UNIT);
1685 base = build_fold_addr_expr (unshare_expr (base));
1686 }
1687
1688 misalign = (misalign + offset) & (align - 1);
1689 if (misalign != 0)
1690 align = least_bit_hwi (misalign);
1691 if (align != TYPE_ALIGN (exp_type))
1692 exp_type = build_aligned_type (exp_type, align);
1693
1694 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1695 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1696 if (TREE_THIS_VOLATILE (prev_base))
1697 TREE_THIS_VOLATILE (mem_ref) = 1;
1698 if (TREE_SIDE_EFFECTS (prev_base))
1699 TREE_SIDE_EFFECTS (mem_ref) = 1;
1700 return mem_ref;
1701 }
1702
1703 /* Construct a memory reference to a part of an aggregate BASE at the given
1704 OFFSET and of the same type as MODEL. In case this is a reference to a
1705 bit-field, the function will replicate the last component_ref of model's
1706 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1707 build_ref_for_offset. */
1708
1709 static tree
1710 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1711 struct access *model, gimple_stmt_iterator *gsi,
1712 bool insert_after)
1713 {
1714 if (TREE_CODE (model->expr) == COMPONENT_REF
1715 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1716 {
1717 /* This access represents a bit-field. */
1718 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1719
1720 offset -= int_bit_position (fld);
1721 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1722 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1723 gsi, insert_after);
1724 /* The flag will be set on the record type. */
1725 REF_REVERSE_STORAGE_ORDER (t) = 0;
1726 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1727 NULL_TREE);
1728 }
1729 else
1730 return
1731 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1732 gsi, insert_after);
1733 }
1734
1735 /* Attempt to build a memory reference that we could put into a gimple
1736 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1737 create statements and returns NULL instead. This function also ignores
1738 alignment issues and so its results should never end up in non-debug
1739 statements. */
1740
1741 static tree
1742 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1743 struct access *model)
1744 {
1745 HOST_WIDE_INT base_offset;
1746 tree off;
1747
1748 if (TREE_CODE (model->expr) == COMPONENT_REF
1749 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1750 return NULL_TREE;
1751
1752 base = get_addr_base_and_unit_offset (base, &base_offset);
1753 if (!base)
1754 return NULL_TREE;
1755 if (TREE_CODE (base) == MEM_REF)
1756 {
1757 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1758 base_offset + offset / BITS_PER_UNIT);
1759 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1760 base = unshare_expr (TREE_OPERAND (base, 0));
1761 }
1762 else
1763 {
1764 off = build_int_cst (reference_alias_ptr_type (base),
1765 base_offset + offset / BITS_PER_UNIT);
1766 base = build_fold_addr_expr (unshare_expr (base));
1767 }
1768
1769 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1770 }
1771
1772 /* Construct a memory reference consisting of component_refs and array_refs to
1773 a part of an aggregate *RES (which is of type TYPE). The requested part
1774 should have type EXP_TYPE and be at the given OFFSET. This function might not
1775 succeed; it returns true when it does, and only then does *RES point to something
1776 meaningful. This function should be used only to build expressions that we
1777 might need to present to the user (e.g. in warnings). In all other situations,
1778 build_ref_for_model or build_ref_for_offset should be used instead. */
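/* A hypothetical example (assuming a 32-bit int): for

     struct S { struct { int a; int b; } in; int c; } s;

a call with *RES == s, TYPE == struct S, OFFSET == 32 and EXP_TYPE == int
rewrites *RES to the component reference chain s.in.b and returns true,
whereas an OFFSET that does not correspond to any field of the requested
type makes it return false and leaves *RES meaningless.  */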
1779
1780 static bool
1781 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1782 tree exp_type)
1783 {
1784 while (1)
1785 {
1786 tree fld;
1787 tree tr_size, index, minidx;
1788 HOST_WIDE_INT el_size;
1789
1790 if (offset == 0 && exp_type
1791 && types_compatible_p (exp_type, type))
1792 return true;
1793
1794 switch (TREE_CODE (type))
1795 {
1796 case UNION_TYPE:
1797 case QUAL_UNION_TYPE:
1798 case RECORD_TYPE:
1799 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1800 {
1801 HOST_WIDE_INT pos, size;
1802 tree tr_pos, expr, *expr_ptr;
1803
1804 if (TREE_CODE (fld) != FIELD_DECL)
1805 continue;
1806
1807 tr_pos = bit_position (fld);
1808 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1809 continue;
1810 pos = tree_to_uhwi (tr_pos);
1811 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1812 tr_size = DECL_SIZE (fld);
1813 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1814 continue;
1815 size = tree_to_uhwi (tr_size);
1816 if (size == 0)
1817 {
1818 if (pos != offset)
1819 continue;
1820 }
1821 else if (pos > offset || (pos + size) <= offset)
1822 continue;
1823
1824 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1825 NULL_TREE);
1826 expr_ptr = &expr;
1827 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1828 offset - pos, exp_type))
1829 {
1830 *res = expr;
1831 return true;
1832 }
1833 }
1834 return false;
1835
1836 case ARRAY_TYPE:
1837 tr_size = TYPE_SIZE (TREE_TYPE (type));
1838 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1839 return false;
1840 el_size = tree_to_uhwi (tr_size);
1841
1842 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1843 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1844 return false;
1845 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1846 if (!integer_zerop (minidx))
1847 index = int_const_binop (PLUS_EXPR, index, minidx);
1848 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1849 NULL_TREE, NULL_TREE);
1850 offset = offset % el_size;
1851 type = TREE_TYPE (type);
1852 break;
1853
1854 default:
1855 if (offset != 0)
1856 return false;
1857
1858 if (exp_type)
1859 return false;
1860 else
1861 return true;
1862 }
1863 }
1864 }
1865
1866 /* Return true iff TYPE is a stdarg va_list type. */
1867
1868 static inline bool
1869 is_va_list_type (tree type)
1870 {
1871 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1872 }
1873
1874 /* Print message to dump file why a variable was rejected. */
1875
1876 static void
1877 reject (tree var, const char *msg)
1878 {
1879 if (dump_file && (dump_flags & TDF_DETAILS))
1880 {
1881 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1882 print_generic_expr (dump_file, var, 0);
1883 fprintf (dump_file, "\n");
1884 }
1885 }
1886
1887 /* Return true if VAR is a candidate for SRA. */
1888
1889 static bool
1890 maybe_add_sra_candidate (tree var)
1891 {
1892 tree type = TREE_TYPE (var);
1893 const char *msg;
1894 tree_node **slot;
1895
1896 if (!AGGREGATE_TYPE_P (type))
1897 {
1898 reject (var, "not aggregate");
1899 return false;
1900 }
1901 /* Allow constant-pool entries (that "need to live in memory")
1902 unless we are doing IPA SRA. */
1903 if (needs_to_live_in_memory (var)
1904 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1905 {
1906 reject (var, "needs to live in memory");
1907 return false;
1908 }
1909 if (TREE_THIS_VOLATILE (var))
1910 {
1911 reject (var, "is volatile");
1912 return false;
1913 }
1914 if (!COMPLETE_TYPE_P (type))
1915 {
1916 reject (var, "has incomplete type");
1917 return false;
1918 }
1919 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1920 {
1921 reject (var, "type size not fixed");
1922 return false;
1923 }
1924 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1925 {
1926 reject (var, "type size is zero");
1927 return false;
1928 }
1929 if (type_internals_preclude_sra_p (type, &msg))
1930 {
1931 reject (var, msg);
1932 return false;
1933 }
1934 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1935 we also want to schedule it rather late. Thus we ignore it in
1936 the early pass. */
1937 (sra_mode == SRA_MODE_EARLY_INTRA
1938 && is_va_list_type (type)))
1939 {
1940 reject (var, "is va_list");
1941 return false;
1942 }
1943
1944 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1945 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1946 *slot = var;
1947
1948 if (dump_file && (dump_flags & TDF_DETAILS))
1949 {
1950 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1951 print_generic_expr (dump_file, var, 0);
1952 fprintf (dump_file, "\n");
1953 }
1954
1955 return true;
1956 }
1957
1958 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1959 those declarations whose type is suitable for scalarization. */
1960
1961 static bool
1962 find_var_candidates (void)
1963 {
1964 tree var, parm;
1965 unsigned int i;
1966 bool ret = false;
1967
1968 for (parm = DECL_ARGUMENTS (current_function_decl);
1969 parm;
1970 parm = DECL_CHAIN (parm))
1971 ret |= maybe_add_sra_candidate (parm);
1972
1973 FOR_EACH_LOCAL_DECL (cfun, i, var)
1974 {
1975 if (!VAR_P (var))
1976 continue;
1977
1978 ret |= maybe_add_sra_candidate (var);
1979 }
1980
1981 return ret;
1982 }
1983
1984 /* Sort all accesses for the given variable, check for partial overlaps and
1985 return NULL if there are any. If there are none, pick a representative for
1986 each combination of offset and size and create a linked list out of them.
1987 Return the pointer to the first representative and make sure it is the first
1988 one in the vector of accesses. */
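/* For instance (illustrative numbers only), accesses with <offset, size>
pairs <0, 32>, <0, 32> and <0, 64> are fine: the two identical ones are
merged under a single representative and the <0, 64> access keeps its own.
Accesses <0, 32> and <16, 32>, however, overlap only partially, so the
function returns NULL and the whole variable is disqualified.  */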
1989
1990 static struct access *
1991 sort_and_splice_var_accesses (tree var)
1992 {
1993 int i, j, access_count;
1994 struct access *res, **prev_acc_ptr = &res;
1995 vec<access_p> *access_vec;
1996 bool first = true;
1997 HOST_WIDE_INT low = -1, high = 0;
1998
1999 access_vec = get_base_access_vector (var);
2000 if (!access_vec)
2001 return NULL;
2002 access_count = access_vec->length ();
2003
2004 /* Sort by <OFFSET, SIZE>. */
2005 access_vec->qsort (compare_access_positions);
2006
2007 i = 0;
2008 while (i < access_count)
2009 {
2010 struct access *access = (*access_vec)[i];
2011 bool grp_write = access->write;
2012 bool grp_read = !access->write;
2013 bool grp_scalar_write = access->write
2014 && is_gimple_reg_type (access->type);
2015 bool grp_scalar_read = !access->write
2016 && is_gimple_reg_type (access->type);
2017 bool grp_assignment_read = access->grp_assignment_read;
2018 bool grp_assignment_write = access->grp_assignment_write;
2019 bool multiple_scalar_reads = false;
2020 bool total_scalarization = access->grp_total_scalarization;
2021 bool grp_partial_lhs = access->grp_partial_lhs;
2022 bool first_scalar = is_gimple_reg_type (access->type);
2023 bool unscalarizable_region = access->grp_unscalarizable_region;
2024
2025 if (first || access->offset >= high)
2026 {
2027 first = false;
2028 low = access->offset;
2029 high = access->offset + access->size;
2030 }
2031 else if (access->offset > low && access->offset + access->size > high)
2032 return NULL;
2033 else
2034 gcc_assert (access->offset >= low
2035 && access->offset + access->size <= high);
2036
2037 j = i + 1;
2038 while (j < access_count)
2039 {
2040 struct access *ac2 = (*access_vec)[j];
2041 if (ac2->offset != access->offset || ac2->size != access->size)
2042 break;
2043 if (ac2->write)
2044 {
2045 grp_write = true;
2046 grp_scalar_write = (grp_scalar_write
2047 || is_gimple_reg_type (ac2->type));
2048 }
2049 else
2050 {
2051 grp_read = true;
2052 if (is_gimple_reg_type (ac2->type))
2053 {
2054 if (grp_scalar_read)
2055 multiple_scalar_reads = true;
2056 else
2057 grp_scalar_read = true;
2058 }
2059 }
2060 grp_assignment_read |= ac2->grp_assignment_read;
2061 grp_assignment_write |= ac2->grp_assignment_write;
2062 grp_partial_lhs |= ac2->grp_partial_lhs;
2063 unscalarizable_region |= ac2->grp_unscalarizable_region;
2064 total_scalarization |= ac2->grp_total_scalarization;
2065 relink_to_new_repr (access, ac2);
2066
2067 /* If there are both aggregate-type and scalar-type accesses with
2068 this combination of size and offset, the comparison function
2069 should have put the scalars first. */
2070 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2071 ac2->group_representative = access;
2072 j++;
2073 }
2074
2075 i = j;
2076
2077 access->group_representative = access;
2078 access->grp_write = grp_write;
2079 access->grp_read = grp_read;
2080 access->grp_scalar_read = grp_scalar_read;
2081 access->grp_scalar_write = grp_scalar_write;
2082 access->grp_assignment_read = grp_assignment_read;
2083 access->grp_assignment_write = grp_assignment_write;
2084 access->grp_hint = total_scalarization
2085 || (multiple_scalar_reads && !constant_decl_p (var));
2086 access->grp_total_scalarization = total_scalarization;
2087 access->grp_partial_lhs = grp_partial_lhs;
2088 access->grp_unscalarizable_region = unscalarizable_region;
2089 if (access->first_link)
2090 add_access_to_work_queue (access);
2091
2092 *prev_acc_ptr = access;
2093 prev_acc_ptr = &access->next_grp;
2094 }
2095
2096 gcc_assert (res == (*access_vec)[0]);
2097 return res;
2098 }
2099
2100 /* Create a variable for the given ACCESS which determines the type, name and a
2101 few other properties. Return the variable declaration; the caller is
2102 expected to store it in ACCESS->replacement_decl. */
2103
2104 static tree
2105 create_access_replacement (struct access *access)
2106 {
2107 tree repl;
2108
2109 if (access->grp_to_be_debug_replaced)
2110 {
2111 repl = create_tmp_var_raw (access->type);
2112 DECL_CONTEXT (repl) = current_function_decl;
2113 }
2114 else
2115 /* Drop any special alignment on the type if it's not on the main
2116 variant. This avoids issues with weirdo ABIs like AAPCS. */
2117 repl = create_tmp_var (build_qualified_type
2118 (TYPE_MAIN_VARIANT (access->type),
2119 TYPE_QUALS (access->type)), "SR");
2120 if (TREE_CODE (access->type) == COMPLEX_TYPE
2121 || TREE_CODE (access->type) == VECTOR_TYPE)
2122 {
2123 if (!access->grp_partial_lhs)
2124 DECL_GIMPLE_REG_P (repl) = 1;
2125 }
2126 else if (access->grp_partial_lhs
2127 && is_gimple_reg_type (access->type))
2128 TREE_ADDRESSABLE (repl) = 1;
2129
2130 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2131 DECL_ARTIFICIAL (repl) = 1;
2132 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2133
2134 if (DECL_NAME (access->base)
2135 && !DECL_IGNORED_P (access->base)
2136 && !DECL_ARTIFICIAL (access->base))
2137 {
2138 char *pretty_name = make_fancy_name (access->expr);
2139 tree debug_expr = unshare_expr_without_location (access->expr), d;
2140 bool fail = false;
2141
2142 DECL_NAME (repl) = get_identifier (pretty_name);
2143 DECL_NAMELESS (repl) = 1;
2144 obstack_free (&name_obstack, pretty_name);
2145
2146 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2147 as DECL_DEBUG_EXPR isn't considered when looking for still
2148 used SSA_NAMEs and thus they could be freed. All debug info
2149 generation cares about is whether something is constant or variable
2150 and that get_ref_base_and_extent works properly on the
2151 expression. It cannot handle accesses at a non-constant offset
2152 though, so just give up in those cases. */
2153 for (d = debug_expr;
2154 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2155 d = TREE_OPERAND (d, 0))
2156 switch (TREE_CODE (d))
2157 {
2158 case ARRAY_REF:
2159 case ARRAY_RANGE_REF:
2160 if (TREE_OPERAND (d, 1)
2161 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2162 fail = true;
2163 if (TREE_OPERAND (d, 3)
2164 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2165 fail = true;
2166 /* FALLTHRU */
2167 case COMPONENT_REF:
2168 if (TREE_OPERAND (d, 2)
2169 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2170 fail = true;
2171 break;
2172 case MEM_REF:
2173 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2174 fail = true;
2175 else
2176 d = TREE_OPERAND (d, 0);
2177 break;
2178 default:
2179 break;
2180 }
2181 if (!fail)
2182 {
2183 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2184 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2185 }
2186 if (access->grp_no_warning)
2187 TREE_NO_WARNING (repl) = 1;
2188 else
2189 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2190 }
2191 else
2192 TREE_NO_WARNING (repl) = 1;
2193
2194 if (dump_file)
2195 {
2196 if (access->grp_to_be_debug_replaced)
2197 {
2198 fprintf (dump_file, "Created a debug-only replacement for ");
2199 print_generic_expr (dump_file, access->base, 0);
2200 fprintf (dump_file, " offset: %u, size: %u\n",
2201 (unsigned) access->offset, (unsigned) access->size);
2202 }
2203 else
2204 {
2205 fprintf (dump_file, "Created a replacement for ");
2206 print_generic_expr (dump_file, access->base, 0);
2207 fprintf (dump_file, " offset: %u, size: %u: ",
2208 (unsigned) access->offset, (unsigned) access->size);
2209 print_generic_expr (dump_file, repl, 0);
2210 fprintf (dump_file, "\n");
2211 }
2212 }
2213 sra_stats.replacements++;
2214
2215 return repl;
2216 }
2217
2218 /* Return the scalar replacement of ACCESS, which must exist. */
2219
2220 static inline tree
2221 get_access_replacement (struct access *access)
2222 {
2223 gcc_checking_assert (access->replacement_decl);
2224 return access->replacement_decl;
2225 }
2226
2227
2228 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2229 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2230 to by it is not "within" the root. Return false iff some accesses partially
2231 overlap. */
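/* As an illustration, a list of representatives sorted as <0, 64>, <0, 32>,
<32, 32> (offset and size in bits) becomes a root access covering bits 0-63
with two children covering bits 0-31 and 32-63 respectively.  */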
2232
2233 static bool
2234 build_access_subtree (struct access **access)
2235 {
2236 struct access *root = *access, *last_child = NULL;
2237 HOST_WIDE_INT limit = root->offset + root->size;
2238
2239 *access = (*access)->next_grp;
2240 while (*access && (*access)->offset + (*access)->size <= limit)
2241 {
2242 if (!last_child)
2243 root->first_child = *access;
2244 else
2245 last_child->next_sibling = *access;
2246 last_child = *access;
2247
2248 if (!build_access_subtree (access))
2249 return false;
2250 }
2251
2252 if (*access && (*access)->offset < limit)
2253 return false;
2254
2255 return true;
2256 }
2257
2258 /* Build a tree of access representatives, ACCESS is the pointer to the first
2259 one, others are linked in a list by the next_grp field. Return false iff
2260 some accesses partially overlap. */
2261
2262 static bool
2263 build_access_trees (struct access *access)
2264 {
2265 while (access)
2266 {
2267 struct access *root = access;
2268
2269 if (!build_access_subtree (&access))
2270 return false;
2271 root->next_grp = access;
2272 }
2273 return true;
2274 }
2275
2276 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2277 array. */
2278
2279 static bool
2280 expr_with_var_bounded_array_refs_p (tree expr)
2281 {
2282 while (handled_component_p (expr))
2283 {
2284 if (TREE_CODE (expr) == ARRAY_REF
2285 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2286 return true;
2287 expr = TREE_OPERAND (expr, 0);
2288 }
2289 return false;
2290 }
2291
2292 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2293 that both seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2294 sorts of access flags appropriately along the way, notably propagate the
2295 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2296 grp_total_scalarization flags from PARENT to ROOT.
2297
2298 Creating a replacement for a scalar access is considered beneficial if its
2299 grp_hint is set (this means we are either attempting total scalarization or
2300 there is more than one direct read access) or according to the following
2301 table:
2302
2303 Access written to through a scalar type (once or more times)
2304 |
2305 | Written to in an assignment statement
2306 | |
2307 | | Access read as scalar _once_
2308 | | |
2309 | | | Read in an assignment statement
2310 | | | |
2311 | | | | Scalarize Comment
2312 -----------------------------------------------------------------------------
2313 0 0 0 0 No access for the scalar
2314 0 0 0 1 No access for the scalar
2315 0 0 1 0 No Single read - won't help
2316 0 0 1 1 No The same case
2317 0 1 0 0 No access for the scalar
2318 0 1 0 1 No access for the scalar
2319 0 1 1 0 Yes s = *g; return s.i;
2320 0 1 1 1 Yes The same case as above
2321 1 0 0 0 No Won't help
2322 1 0 0 1 Yes s.i = 1; *g = s;
2323 1 0 1 0 Yes s.i = 5; g = s.i;
2324 1 0 1 1 Yes The same case as above
2325 1 1 0 0 No Won't help.
2326 1 1 0 1 Yes s.i = 1; *g = s;
2327 1 1 1 0 Yes s = *g; return s.i;
2328 1 1 1 1 Yes Any of the above yeses */
2329
2330 static bool
2331 analyze_access_subtree (struct access *root, struct access *parent,
2332 bool allow_replacements)
2333 {
2334 struct access *child;
2335 HOST_WIDE_INT limit = root->offset + root->size;
2336 HOST_WIDE_INT covered_to = root->offset;
2337 bool scalar = is_gimple_reg_type (root->type);
2338 bool hole = false, sth_created = false;
2339
2340 if (parent)
2341 {
2342 if (parent->grp_read)
2343 root->grp_read = 1;
2344 if (parent->grp_assignment_read)
2345 root->grp_assignment_read = 1;
2346 if (parent->grp_write)
2347 root->grp_write = 1;
2348 if (parent->grp_assignment_write)
2349 root->grp_assignment_write = 1;
2350 if (parent->grp_total_scalarization)
2351 root->grp_total_scalarization = 1;
2352 }
2353
2354 if (root->grp_unscalarizable_region)
2355 allow_replacements = false;
2356
2357 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2358 allow_replacements = false;
2359
2360 for (child = root->first_child; child; child = child->next_sibling)
2361 {
2362 hole |= covered_to < child->offset;
2363 sth_created |= analyze_access_subtree (child, root,
2364 allow_replacements && !scalar);
2365
2366 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2367 root->grp_total_scalarization &= child->grp_total_scalarization;
2368 if (child->grp_covered)
2369 covered_to += child->size;
2370 else
2371 hole = true;
2372 }
2373
2374 if (allow_replacements && scalar && !root->first_child
2375 && (root->grp_hint
2376 || ((root->grp_scalar_read || root->grp_assignment_read)
2377 && (root->grp_scalar_write || root->grp_assignment_write))))
2378 {
2379 /* Always create access replacements that cover the whole access.
2380 For integral types this means the precision has to match.
2381 Avoid assumptions based on the integral type kind, too. */
2382 if (INTEGRAL_TYPE_P (root->type)
2383 && (TREE_CODE (root->type) != INTEGER_TYPE
2384 || TYPE_PRECISION (root->type) != root->size)
2385 /* But leave bitfield accesses alone. */
2386 && (TREE_CODE (root->expr) != COMPONENT_REF
2387 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2388 {
2389 tree rt = root->type;
2390 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2391 && (root->size % BITS_PER_UNIT) == 0);
2392 root->type = build_nonstandard_integer_type (root->size,
2393 TYPE_UNSIGNED (rt));
2394 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2395 root->offset, root->reverse,
2396 root->type, NULL, false);
2397
2398 if (dump_file && (dump_flags & TDF_DETAILS))
2399 {
2400 fprintf (dump_file, "Changing the type of a replacement for ");
2401 print_generic_expr (dump_file, root->base, 0);
2402 fprintf (dump_file, " offset: %u, size: %u ",
2403 (unsigned) root->offset, (unsigned) root->size);
2404 fprintf (dump_file, " to an integer.\n");
2405 }
2406 }
2407
2408 root->grp_to_be_replaced = 1;
2409 root->replacement_decl = create_access_replacement (root);
2410 sth_created = true;
2411 hole = false;
2412 }
2413 else
2414 {
2415 if (allow_replacements
2416 && scalar && !root->first_child
2417 && (root->grp_scalar_write || root->grp_assignment_write)
2418 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2419 DECL_UID (root->base)))
2420 {
2421 gcc_checking_assert (!root->grp_scalar_read
2422 && !root->grp_assignment_read);
2423 sth_created = true;
2424 if (MAY_HAVE_DEBUG_STMTS)
2425 {
2426 root->grp_to_be_debug_replaced = 1;
2427 root->replacement_decl = create_access_replacement (root);
2428 }
2429 }
2430
2431 if (covered_to < limit)
2432 hole = true;
2433 if (scalar || !allow_replacements)
2434 root->grp_total_scalarization = 0;
2435 }
2436
2437 if (!hole || root->grp_total_scalarization)
2438 root->grp_covered = 1;
2439 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL
2440 || constant_decl_p (root->base))
2441 root->grp_unscalarized_data = 1; /* not covered and written to */
2442 return sth_created;
2443 }
2444
2445 /* Analyze all access trees linked by next_grp by means of
2446 analyze_access_subtree. */
2447 static bool
2448 analyze_access_trees (struct access *access)
2449 {
2450 bool ret = false;
2451
2452 while (access)
2453 {
2454 if (analyze_access_subtree (access, NULL, true))
2455 ret = true;
2456 access = access->next_grp;
2457 }
2458
2459 return ret;
2460 }
2461
2462 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2463 SIZE would conflict with an already existing one. If exactly such a child
2464 already exists in LACC, store a pointer to it in EXACT_MATCH. */
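/* For example, an existing child at <32, 32> conflicts with a proposed child
at <48, 32> because they overlap only partially, whereas a proposed child at
<32, 32> is an exact match and is returned through EXACT_MATCH.  */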
2465
2466 static bool
2467 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2468 HOST_WIDE_INT size, struct access **exact_match)
2469 {
2470 struct access *child;
2471
2472 for (child = lacc->first_child; child; child = child->next_sibling)
2473 {
2474 if (child->offset == norm_offset && child->size == size)
2475 {
2476 *exact_match = child;
2477 return true;
2478 }
2479
2480 if (child->offset < norm_offset + size
2481 && child->offset + child->size > norm_offset)
2482 return true;
2483 }
2484
2485 return false;
2486 }
2487
2488 /* Create a new child access of PARENT, with all properties just like MODEL
2489 except for its offset and with its grp_write true and grp_read false.
2490 Return the new access or NULL if it cannot be created. Note that this access
2491 is created long after all splicing and sorting, it's not located in any
2492 access vector and is automatically a representative of its group. */
2493
2494 static struct access *
2495 create_artificial_child_access (struct access *parent, struct access *model,
2496 HOST_WIDE_INT new_offset)
2497 {
2498 struct access **child;
2499 tree expr = parent->base;
2500
2501 gcc_assert (!model->grp_unscalarizable_region);
2502
2503 struct access *access = access_pool.allocate ();
2504 memset (access, 0, sizeof (struct access));
2505 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2506 model->type))
2507 {
2508 access->grp_no_warning = true;
2509 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2510 new_offset, model, NULL, false);
2511 }
2512
2513 access->base = parent->base;
2514 access->expr = expr;
2515 access->offset = new_offset;
2516 access->size = model->size;
2517 access->type = model->type;
2518 access->grp_write = true;
2519 access->grp_read = false;
2520 access->reverse = model->reverse;
2521
2522 child = &parent->first_child;
2523 while (*child && (*child)->offset < new_offset)
2524 child = &(*child)->next_sibling;
2525
2526 access->next_sibling = *child;
2527 *child = access;
2528
2529 return access;
2530 }
2531
2532
2533 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2534 true if any new subaccess was created. Additionally, if RACC is a scalar
2535 access but LACC is not, change the type of the latter, if possible. */
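/* Sketch of the intent (SR_* names are made up here; the real replacements
are only created later by create_access_replacement): given an aggregate
assignment

     dst = src;

where so far only src.f has been accessed as a scalar, propagating across the
corresponding assign link creates an artificial child access for the same
part of dst, so that the copy can eventually be rewritten in terms of

     SR_dst_f = SR_src_f;  */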
2536
2537 static bool
2538 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2539 {
2540 struct access *rchild;
2541 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2542 bool ret = false;
2543
2544 if (is_gimple_reg_type (lacc->type)
2545 || lacc->grp_unscalarizable_region
2546 || racc->grp_unscalarizable_region)
2547 return false;
2548
2549 if (is_gimple_reg_type (racc->type))
2550 {
2551 if (!lacc->first_child && !racc->first_child)
2552 {
2553 tree t = lacc->base;
2554
2555 lacc->type = racc->type;
2556 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2557 lacc->offset, racc->type))
2558 lacc->expr = t;
2559 else
2560 {
2561 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2562 lacc->base, lacc->offset,
2563 racc, NULL, false);
2564 lacc->grp_no_warning = true;
2565 }
2566 }
2567 return false;
2568 }
2569
2570 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2571 {
2572 struct access *new_acc = NULL;
2573 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2574
2575 if (rchild->grp_unscalarizable_region)
2576 continue;
2577
2578 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2579 &new_acc))
2580 {
2581 if (new_acc)
2582 {
2583 rchild->grp_hint = 1;
2584 new_acc->grp_hint |= new_acc->grp_read;
2585 if (rchild->first_child)
2586 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2587 }
2588 continue;
2589 }
2590
2591 rchild->grp_hint = 1;
2592 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2593 if (new_acc)
2594 {
2595 ret = true;
2596 if (racc->first_child)
2597 propagate_subaccesses_across_link (new_acc, rchild);
2598 }
2599 }
2600
2601 return ret;
2602 }
2603
2604 /* Propagate all subaccesses across assignment links. */
2605
2606 static void
2607 propagate_all_subaccesses (void)
2608 {
2609 while (work_queue_head)
2610 {
2611 struct access *racc = pop_access_from_work_queue ();
2612 struct assign_link *link;
2613
2614 gcc_assert (racc->first_link);
2615
2616 for (link = racc->first_link; link; link = link->next)
2617 {
2618 struct access *lacc = link->lacc;
2619
2620 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2621 continue;
2622 lacc = lacc->group_representative;
2623 if (propagate_subaccesses_across_link (lacc, racc)
2624 && lacc->first_link)
2625 add_access_to_work_queue (lacc);
2626 }
2627 }
2628 }
2629
2630 /* Go through all accesses collected throughout the (intraprocedural) analysis
2631 stage, exclude overlapping ones, identify representatives and build trees
2632 out of them, making decisions about scalarization on the way. Return true
2633 iff there are any to-be-scalarized variables after this stage. */
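/* The total scalarization limit used below corresponds, in this GCC version,
to the --param sra-max-scalarization-size-Ospeed and
--param sra-max-scalarization-size-Osize knobs (values are in storage units
and converted to bits below), e.g.

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 ...

would allow aggregates of up to 64 bytes to be totally scalarized when
optimizing for speed.  */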
2634
2635 static bool
2636 analyze_all_variable_accesses (void)
2637 {
2638 int res = 0;
2639 bitmap tmp = BITMAP_ALLOC (NULL);
2640 bitmap_iterator bi;
2641 unsigned i;
2642 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2643
2644 enum compiler_param param = optimize_speed_p
2645 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2646 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2647
2648 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2649 fall back to a target default. */
2650 unsigned HOST_WIDE_INT max_scalarization_size
2651 = global_options_set.x_param_values[param]
2652 ? PARAM_VALUE (param)
2653 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2654
2655 max_scalarization_size *= BITS_PER_UNIT;
2656
2657 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2658 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2659 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2660 {
2661 tree var = candidate (i);
2662
2663 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var)))
2664 {
2665 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2666 <= max_scalarization_size)
2667 {
2668 create_total_scalarization_access (var);
2669 completely_scalarize (var, TREE_TYPE (var), 0, var);
2670 if (dump_file && (dump_flags & TDF_DETAILS))
2671 {
2672 fprintf (dump_file, "Will attempt to totally scalarize ");
2673 print_generic_expr (dump_file, var, 0);
2674 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2675 }
2676 }
2677 else if (dump_file && (dump_flags & TDF_DETAILS))
2678 {
2679 fprintf (dump_file, "Too big to totally scalarize: ");
2680 print_generic_expr (dump_file, var, 0);
2681 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2682 }
2683 }
2684 }
2685
2686 bitmap_copy (tmp, candidate_bitmap);
2687 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2688 {
2689 tree var = candidate (i);
2690 struct access *access;
2691
2692 access = sort_and_splice_var_accesses (var);
2693 if (!access || !build_access_trees (access))
2694 disqualify_candidate (var,
2695 "No or inhibitingly overlapping accesses.");
2696 }
2697
2698 propagate_all_subaccesses ();
2699
2700 bitmap_copy (tmp, candidate_bitmap);
2701 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2702 {
2703 tree var = candidate (i);
2704 struct access *access = get_first_repr_for_decl (var);
2705
2706 if (analyze_access_trees (access))
2707 {
2708 res++;
2709 if (dump_file && (dump_flags & TDF_DETAILS))
2710 {
2711 fprintf (dump_file, "\nAccess trees for ");
2712 print_generic_expr (dump_file, var, 0);
2713 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2714 dump_access_tree (dump_file, access);
2715 fprintf (dump_file, "\n");
2716 }
2717 }
2718 else
2719 disqualify_candidate (var, "No scalar replacements to be created.");
2720 }
2721
2722 BITMAP_FREE (tmp);
2723
2724 if (res)
2725 {
2726 statistics_counter_event (cfun, "Scalarized aggregates", res);
2727 return true;
2728 }
2729 else
2730 return false;
2731 }
2732
2733 /* Generate statements copying scalar replacements of accesses within a subtree
2734 into or out of AGG. ACCESS, all its children, siblings and their children
2735 are to be processed. AGG is an aggregate type expression (can be a
2736 declaration but does not have to be, it can for example also be a mem_ref or
2737 a series of handled components). TOP_OFFSET is the offset of the processed
2738 subtree which has to be subtracted from offsets of individual accesses to
2739 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2740 replacements in the interval <start_offset, start_offset + chunk_size>,
2741 otherwise copy all. GSI is a statement iterator used to place the new
2742 statements. WRITE should be true when the statements should write from AGG
2743 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2744 statements will be added after the current statement in GSI; otherwise they
2745 will be added before it. */
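/* For example (replacement names invented), for an aggregate AGG with two
scalarized fields x and y, a call with WRITE being true produces statements
along the lines of

     SR_x = AGG.x;
     SR_y = AGG.y;

while with WRITE being false the direction is reversed:

     AGG.x = SR_x;
     AGG.y = SR_y;  */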
2746
2747 static void
2748 generate_subtree_copies (struct access *access, tree agg,
2749 HOST_WIDE_INT top_offset,
2750 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2751 gimple_stmt_iterator *gsi, bool write,
2752 bool insert_after, location_t loc)
2753 {
2754 /* Never write anything into constant pool decls. See PR70602. */
2755 if (!write && constant_decl_p (agg))
2756 return;
2757 do
2758 {
2759 if (chunk_size && access->offset >= start_offset + chunk_size)
2760 return;
2761
2762 if (access->grp_to_be_replaced
2763 && (chunk_size == 0
2764 || access->offset + access->size > start_offset))
2765 {
2766 tree expr, repl = get_access_replacement (access);
2767 gassign *stmt;
2768
2769 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2770 access, gsi, insert_after);
2771
2772 if (write)
2773 {
2774 if (access->grp_partial_lhs)
2775 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2776 !insert_after,
2777 insert_after ? GSI_NEW_STMT
2778 : GSI_SAME_STMT);
2779 stmt = gimple_build_assign (repl, expr);
2780 }
2781 else
2782 {
2783 TREE_NO_WARNING (repl) = 1;
2784 if (access->grp_partial_lhs)
2785 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2786 !insert_after,
2787 insert_after ? GSI_NEW_STMT
2788 : GSI_SAME_STMT);
2789 stmt = gimple_build_assign (expr, repl);
2790 }
2791 gimple_set_location (stmt, loc);
2792
2793 if (insert_after)
2794 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2795 else
2796 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2797 update_stmt (stmt);
2798 sra_stats.subtree_copies++;
2799 }
2800 else if (write
2801 && access->grp_to_be_debug_replaced
2802 && (chunk_size == 0
2803 || access->offset + access->size > start_offset))
2804 {
2805 gdebug *ds;
2806 tree drhs = build_debug_ref_for_model (loc, agg,
2807 access->offset - top_offset,
2808 access);
2809 ds = gimple_build_debug_bind (get_access_replacement (access),
2810 drhs, gsi_stmt (*gsi));
2811 if (insert_after)
2812 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2813 else
2814 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2815 }
2816
2817 if (access->first_child)
2818 generate_subtree_copies (access->first_child, agg, top_offset,
2819 start_offset, chunk_size, gsi,
2820 write, insert_after, loc);
2821
2822 access = access->next_sibling;
2823 }
2824 while (access);
2825 }
2826
2827 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2828 root of the subtree to be processed. GSI is the statement iterator used
2829 for inserting statements which are added after the current statement if
2830 INSERT_AFTER is true or before it otherwise. */
2831
2832 static void
2833 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2834 bool insert_after, location_t loc)
2835
2836 {
2837 struct access *child;
2838
2839 if (access->grp_to_be_replaced)
2840 {
2841 gassign *stmt;
2842
2843 stmt = gimple_build_assign (get_access_replacement (access),
2844 build_zero_cst (access->type));
2845 if (insert_after)
2846 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2847 else
2848 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2849 update_stmt (stmt);
2850 gimple_set_location (stmt, loc);
2851 }
2852 else if (access->grp_to_be_debug_replaced)
2853 {
2854 gdebug *ds
2855 = gimple_build_debug_bind (get_access_replacement (access),
2856 build_zero_cst (access->type),
2857 gsi_stmt (*gsi));
2858 if (insert_after)
2859 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2860 else
2861 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2862 }
2863
2864 for (child = access->first_child; child; child = child->next_sibling)
2865 init_subtree_with_zero (child, gsi, insert_after, loc);
2866 }
2867
2868 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2869 root of the subtree to be processed. GSI is the statement iterator used
2870 for inserting statements which are added after the current statement if
2871 INSERT_AFTER is true or before it otherwise. */
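/* In GIMPLE dumps such a clobber shows up as e.g. "SR_1 ={v} {CLOBBER};"
(replacement name invented): an assignment of an empty, volatile CONSTRUCTOR,
which is what gets built below.  */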
2872
2873 static void
2874 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2875 bool insert_after, location_t loc)
2876
2877 {
2878 struct access *child;
2879
2880 if (access->grp_to_be_replaced)
2881 {
2882 tree rep = get_access_replacement (access);
2883 tree clobber = build_constructor (access->type, NULL);
2884 TREE_THIS_VOLATILE (clobber) = 1;
2885 gimple *stmt = gimple_build_assign (rep, clobber);
2886
2887 if (insert_after)
2888 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2889 else
2890 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2891 update_stmt (stmt);
2892 gimple_set_location (stmt, loc);
2893 }
2894
2895 for (child = access->first_child; child; child = child->next_sibling)
2896 clobber_subtree (child, gsi, insert_after, loc);
2897 }
2898
2899 /* Search for an access representative for the given expression EXPR and
2900 return it or NULL if it cannot be found. */
2901
2902 static struct access *
2903 get_access_for_expr (tree expr)
2904 {
2905 HOST_WIDE_INT offset, size, max_size;
2906 tree base;
2907 bool reverse;
2908
2909 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2910 a different size than the size of its argument and we need the latter
2911 one. */
2912 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2913 expr = TREE_OPERAND (expr, 0);
2914
2915 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
2916 if (max_size == -1 || !DECL_P (base))
2917 return NULL;
2918
2919 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2920 return NULL;
2921
2922 return get_var_base_offset_size_access (base, offset, max_size);
2923 }
2924
2925 /* Replace the expression EXPR with a scalar replacement if there is one and
2926 generate other statements to do type conversion or subtree copying if
2927 necessary. GSI is used to place newly created statements, WRITE is true if
2928 the expression is being written to (it is on a LHS of a statement or output
2929 in an assembly statement). */
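/* An illustrative sketch (SR_s_f is an invented replacement name): a read

     ... = s.f;

where the access for s.f is to be replaced simply becomes

     ... = SR_s_f;

When the type of the expression and the type of the replacement differ, the
original reference is kept instead and an extra copy statement to or from the
replacement is emitted next to it.  */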
2930
2931 static bool
2932 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2933 {
2934 location_t loc;
2935 struct access *access;
2936 tree type, bfr, orig_expr;
2937
2938 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2939 {
2940 bfr = *expr;
2941 expr = &TREE_OPERAND (*expr, 0);
2942 }
2943 else
2944 bfr = NULL_TREE;
2945
2946 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2947 expr = &TREE_OPERAND (*expr, 0);
2948 access = get_access_for_expr (*expr);
2949 if (!access)
2950 return false;
2951 type = TREE_TYPE (*expr);
2952 orig_expr = *expr;
2953
2954 loc = gimple_location (gsi_stmt (*gsi));
2955 gimple_stmt_iterator alt_gsi = gsi_none ();
2956 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2957 {
2958 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2959 gsi = &alt_gsi;
2960 }
2961
2962 if (access->grp_to_be_replaced)
2963 {
2964 tree repl = get_access_replacement (access);
2965 /* If we replace a non-register typed access simply use the original
2966 access expression to extract the scalar component afterwards.
2967 This happens if scalarizing a function return value or parameter
2968 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2969 gcc.c-torture/compile/20011217-1.c.
2970
2971 We also want to use this when accessing a complex or vector which can
2972 be accessed as a different type too, potentially creating a need for
2973 type conversion (see PR42196) and when scalarized unions are involved
2974 in assembler statements (see PR42398). */
2975 if (!useless_type_conversion_p (type, access->type))
2976 {
2977 tree ref;
2978
2979 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2980
2981 if (write)
2982 {
2983 gassign *stmt;
2984
2985 if (access->grp_partial_lhs)
2986 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2987 false, GSI_NEW_STMT);
2988 stmt = gimple_build_assign (repl, ref);
2989 gimple_set_location (stmt, loc);
2990 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2991 }
2992 else
2993 {
2994 gassign *stmt;
2995
2996 if (access->grp_partial_lhs)
2997 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2998 true, GSI_SAME_STMT);
2999 stmt = gimple_build_assign (ref, repl);
3000 gimple_set_location (stmt, loc);
3001 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3002 }
3003 }
3004 else
3005 *expr = repl;
3006 sra_stats.exprs++;
3007 }
3008 else if (write && access->grp_to_be_debug_replaced)
3009 {
3010 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3011 NULL_TREE,
3012 gsi_stmt (*gsi));
3013 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3014 }
3015
3016 if (access->first_child)
3017 {
3018 HOST_WIDE_INT start_offset, chunk_size;
3019 if (bfr
3020 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3021 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3022 {
3023 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3024 start_offset = access->offset
3025 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3026 }
3027 else
3028 start_offset = chunk_size = 0;
3029
3030 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3031 start_offset, chunk_size, gsi, write, write,
3032 loc);
3033 }
3034 return true;
3035 }
3036
3037 /* Where scalar replacements of the RHS have been written to when a replacement
3038 of the LHS of an assignment cannot be directly loaded from a replacement of
3039 the RHS. */
3040 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3041 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3042 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3043
3044 struct subreplacement_assignment_data
3045 {
3046 /* Offset of the access representing the lhs of the assignment. */
3047 HOST_WIDE_INT left_offset;
3048
3049 /* LHS and RHS of the original assignment. */
3050 tree assignment_lhs, assignment_rhs;
3051
3052 /* Access representing the rhs of the whole assignment. */
3053 struct access *top_racc;
3054
3055 /* Stmt iterator used for statement insertions after the original assignment.
3056 It points to the main GSI used to traverse a BB during function body
3057 modification. */
3058 gimple_stmt_iterator *new_gsi;
3059
3060 /* Stmt iterator used for statement insertions before the original
3061 assignment. Keeps on pointing to the original statement. */
3062 gimple_stmt_iterator old_gsi;
3063
3064 /* Location of the assignment. */
3065 location_t loc;
3066
3067 /* Keeps the information whether we have needed to refresh replacements of
3068 the LHS and from which side of the assignments this takes place. */
3069 enum unscalarized_data_handling refreshed;
3070 };
3071
3072 /* Store all replacements in the access tree rooted in SAD->top_racc either
3073 back to their base aggregate, if there are unscalarized data, or directly to
3074 the LHS of the original assignment. */
3075
3076 static void
3077 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3078 {
3079 tree src;
3080 if (sad->top_racc->grp_unscalarized_data)
3081 {
3082 src = sad->assignment_rhs;
3083 sad->refreshed = SRA_UDH_RIGHT;
3084 }
3085 else
3086 {
3087 src = sad->assignment_lhs;
3088 sad->refreshed = SRA_UDH_LEFT;
3089 }
3090 generate_subtree_copies (sad->top_racc->first_child, src,
3091 sad->top_racc->offset, 0, 0,
3092 &sad->old_gsi, false, false, sad->loc);
3093 }
3094
3095 /* Try to generate statements to load all sub-replacements in an access subtree
3096 formed by children of LACC from scalar replacements in the SAD->top_racc
3097 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3098 and load the accesses from it. */
3099
3100 static void
3101 load_assign_lhs_subreplacements (struct access *lacc,
3102 struct subreplacement_assignment_data *sad)
3103 {
3104 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3105 {
3106 HOST_WIDE_INT offset;
3107 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3108
3109 if (lacc->grp_to_be_replaced)
3110 {
3111 struct access *racc;
3112 gassign *stmt;
3113 tree rhs;
3114
3115 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3116 if (racc && racc->grp_to_be_replaced)
3117 {
3118 rhs = get_access_replacement (racc);
3119 if (!useless_type_conversion_p (lacc->type, racc->type))
3120 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3121 lacc->type, rhs);
3122
3123 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3124 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3125 NULL_TREE, true, GSI_SAME_STMT);
3126 }
3127 else
3128 {
3129 /* No suitable access on the right hand side, need to load from
3130 the aggregate. See if we have to update it first... */
3131 if (sad->refreshed == SRA_UDH_NONE)
3132 handle_unscalarized_data_in_subtree (sad);
3133
3134 if (sad->refreshed == SRA_UDH_LEFT)
3135 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3136 lacc->offset - sad->left_offset,
3137 lacc, sad->new_gsi, true);
3138 else
3139 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3140 lacc->offset - sad->left_offset,
3141 lacc, sad->new_gsi, true);
3142 if (lacc->grp_partial_lhs)
3143 rhs = force_gimple_operand_gsi (sad->new_gsi,
3144 rhs, true, NULL_TREE,
3145 false, GSI_NEW_STMT);
3146 }
3147
3148 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3149 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3150 gimple_set_location (stmt, sad->loc);
3151 update_stmt (stmt);
3152 sra_stats.subreplacements++;
3153 }
3154 else
3155 {
3156 if (sad->refreshed == SRA_UDH_NONE
3157 && lacc->grp_read && !lacc->grp_covered)
3158 handle_unscalarized_data_in_subtree (sad);
3159
3160 if (lacc && lacc->grp_to_be_debug_replaced)
3161 {
3162 gdebug *ds;
3163 tree drhs;
3164 struct access *racc = find_access_in_subtree (sad->top_racc,
3165 offset,
3166 lacc->size);
3167
3168 if (racc && racc->grp_to_be_replaced)
3169 {
3170 if (racc->grp_write || constant_decl_p (racc->base))
3171 drhs = get_access_replacement (racc);
3172 else
3173 drhs = NULL;
3174 }
3175 else if (sad->refreshed == SRA_UDH_LEFT)
3176 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3177 lacc->offset, lacc);
3178 else if (sad->refreshed == SRA_UDH_RIGHT)
3179 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3180 offset, lacc);
3181 else
3182 drhs = NULL_TREE;
3183 if (drhs
3184 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3185 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3186 lacc->type, drhs);
3187 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3188 drhs, gsi_stmt (sad->old_gsi));
3189 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3190 }
3191 }
3192
3193 if (lacc->first_child)
3194 load_assign_lhs_subreplacements (lacc, sad);
3195 }
3196 }
3197
3198 /* Result code for SRA assignment modification. */
3199 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3200 SRA_AM_MODIFIED, /* stmt changed but not
3201 removed */
3202 SRA_AM_REMOVED }; /* stmt eliminated */
3203
3204 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3205 to the assignment and GSI is the statement iterator pointing at it. Returns
3206 the same values as sra_modify_assign. */
3207
3208 static enum assignment_mod_result
3209 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3210 {
3211 tree lhs = gimple_assign_lhs (stmt);
3212 struct access *acc = get_access_for_expr (lhs);
3213 if (!acc)
3214 return SRA_AM_NONE;
3215 location_t loc = gimple_location (stmt);
3216
3217 if (gimple_clobber_p (stmt))
3218 {
3219 /* Clobber the replacement variable. */
3220 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3221 /* Remove clobbers of fully scalarized variables, they are dead. */
3222 if (acc->grp_covered)
3223 {
3224 unlink_stmt_vdef (stmt);
3225 gsi_remove (gsi, true);
3226 release_defs (stmt);
3227 return SRA_AM_REMOVED;
3228 }
3229 else
3230 return SRA_AM_MODIFIED;
3231 }
3232
3233 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3234 {
3235 /* I have never seen this code path trigger but if it can happen the
3236 following should handle it gracefully. */
3237 if (access_has_children_p (acc))
3238 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3239 true, true, loc);
3240 return SRA_AM_MODIFIED;
3241 }
3242
3243 if (acc->grp_covered)
3244 {
3245 init_subtree_with_zero (acc, gsi, false, loc);
3246 unlink_stmt_vdef (stmt);
3247 gsi_remove (gsi, true);
3248 release_defs (stmt);
3249 return SRA_AM_REMOVED;
3250 }
3251 else
3252 {
3253 init_subtree_with_zero (acc, gsi, true, loc);
3254 return SRA_AM_MODIFIED;
3255 }
3256 }
3257
3258 /* Create and return a new suitable default definition SSA_NAME for RACC which
3259 is an access describing an uninitialized part of an aggregate that is being
3260 loaded. */
3261
3262 static tree
3263 get_repl_default_def_ssa_name (struct access *racc)
3264 {
3265 gcc_checking_assert (!racc->grp_to_be_replaced
3266 && !racc->grp_to_be_debug_replaced);
3267 if (!racc->replacement_decl)
3268 racc->replacement_decl = create_access_replacement (racc);
3269 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3270 }
3271
3272 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3273 bit-field field declaration somewhere in it. */
3274
3275 static inline bool
3276 contains_vce_or_bfcref_p (const_tree ref)
3277 {
3278 while (handled_component_p (ref))
3279 {
3280 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3281 || (TREE_CODE (ref) == COMPONENT_REF
3282 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3283 return true;
3284 ref = TREE_OPERAND (ref, 0);
3285 }
3286
3287 return false;
3288 }
3289
3290 /* Examine both sides of the assignment statement pointed to by STMT, replace
3291 them with a scalar replacement if there is one and generate copying of
3292 replacements if scalarized aggregates have been used in the assignment. GSI
3293 is used to hold generated statements for type conversions and subtree
3294 copying. */
3295
3296 static enum assignment_mod_result
3297 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3298 {
3299 struct access *lacc, *racc;
3300 tree lhs, rhs;
3301 bool modify_this_stmt = false;
3302 bool force_gimple_rhs = false;
3303 location_t loc;
3304 gimple_stmt_iterator orig_gsi = *gsi;
3305
3306 if (!gimple_assign_single_p (stmt))
3307 return SRA_AM_NONE;
3308 lhs = gimple_assign_lhs (stmt);
3309 rhs = gimple_assign_rhs1 (stmt);
3310
3311 if (TREE_CODE (rhs) == CONSTRUCTOR)
3312 return sra_modify_constructor_assign (stmt, gsi);
3313
3314 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3315 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3316 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3317 {
3318 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3319 gsi, false);
3320 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3321 gsi, true);
3322 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3323 }
3324
3325 lacc = get_access_for_expr (lhs);
3326 racc = get_access_for_expr (rhs);
3327 if (!lacc && !racc)
3328 return SRA_AM_NONE;
3329 /* Avoid modifying initializations of constant-pool replacements. */
3330 if (racc && (racc->replacement_decl == lhs))
3331 return SRA_AM_NONE;
3332
3333 loc = gimple_location (stmt);
3334 if (lacc && lacc->grp_to_be_replaced)
3335 {
3336 lhs = get_access_replacement (lacc);
3337 gimple_assign_set_lhs (stmt, lhs);
3338 modify_this_stmt = true;
3339 if (lacc->grp_partial_lhs)
3340 force_gimple_rhs = true;
3341 sra_stats.exprs++;
3342 }
3343
3344 if (racc && racc->grp_to_be_replaced)
3345 {
3346 rhs = get_access_replacement (racc);
3347 modify_this_stmt = true;
3348 if (racc->grp_partial_lhs)
3349 force_gimple_rhs = true;
3350 sra_stats.exprs++;
3351 }
3352 else if (racc
3353 && !racc->grp_unscalarized_data
3354 && !racc->grp_unscalarizable_region
3355 && TREE_CODE (lhs) == SSA_NAME
3356 && !access_has_replacements_p (racc))
3357 {
3358 rhs = get_repl_default_def_ssa_name (racc);
3359 modify_this_stmt = true;
3360 sra_stats.exprs++;
3361 }
3362
3363 if (modify_this_stmt)
3364 {
3365 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3366 {
3367 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3368 ??? This should move to fold_stmt which we simply should
3369 call after building a VIEW_CONVERT_EXPR here. */
3370 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3371 && !contains_bitfld_component_ref_p (lhs))
3372 {
3373 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3374 gimple_assign_set_lhs (stmt, lhs);
3375 }
3376 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3377 && !contains_vce_or_bfcref_p (rhs))
3378 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3379
3380 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3381 {
3382 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3383 rhs);
3384 if (is_gimple_reg_type (TREE_TYPE (lhs))
3385 && TREE_CODE (lhs) != SSA_NAME)
3386 force_gimple_rhs = true;
3387 }
3388 }
3389 }
3390
3391 if (lacc && lacc->grp_to_be_debug_replaced)
3392 {
3393 tree dlhs = get_access_replacement (lacc);
3394 tree drhs = unshare_expr (rhs);
3395 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3396 {
3397 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3398 && !contains_vce_or_bfcref_p (drhs))
3399 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3400 if (drhs
3401 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3402 TREE_TYPE (drhs)))
3403 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3404 TREE_TYPE (dlhs), drhs);
3405 }
3406 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3407 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3408 }
3409
3410 /* From this point on, the function deals with assignments in between
3411 aggregates when at least one has scalar reductions of some of its
3412 components. There are three possible scenarios: 1) both the LHS and RHS have
3413 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3414
3415 In the first case, we would like to load the LHS components from RHS
3416 components whenever possible. If that is not possible, we would like to
3417 read it directly from the RHS (after updating it by storing in it its own
3418 components). If there are some necessary unscalarized data in the LHS,
3419 those will be loaded by the original assignment too. If neither of these
3420 cases happen, the original statement can be removed. Most of this is done
3421 by load_assign_lhs_subreplacements.
3422
3423 In the second case, we would like to store all RHS scalarized components
3424 directly into LHS and if they cover the aggregate completely, remove the
3425 statement too. In the third case, we want the LHS components to be loaded
3426 directly from the RHS (DSE will remove the original statement if it
3427 becomes redundant).
3428
3429 This is a bit complex but manageable when types match and when unions do
3430 not cause confusion in a way that we cannot really load a component of LHS
3431 from the RHS or vice versa (the access representing this level can have
3432 subaccesses that are accessible only through a different union field at a
3433 higher level - different from the one used in the examined expression).
3434 Unions are fun.
3435
3436 Therefore, I specially handle a fourth case, happening when there is a
3437 specific type cast or it is impossible to locate a scalarized subaccess on
3438 the other side of the expression. If that happens, I simply "refresh" the
3439 RHS by storing its scalarized components in it, leave the original statement
3440 there to do the copying and then load the scalar replacements of the LHS.
3441 This is what the first branch does. */
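/* A made-up illustration of that "refresh" branch: for an assignment

     d = VIEW_CONVERT_EXPR<struct D>(s);

where s has scalar replacements SR_s_i and SR_s_j (invented names), it
produces statements along the lines of

     s.i = SR_s_i;
     s.j = SR_s_j;

before the statement, keeps the original aggregate copy to do the actual
work, and, if d is scalarized too, re-loads d's replacements from it after
the statement.  */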
3442
3443 if (modify_this_stmt
3444 || gimple_has_volatile_ops (stmt)
3445 || contains_vce_or_bfcref_p (rhs)
3446 || contains_vce_or_bfcref_p (lhs)
3447 || stmt_ends_bb_p (stmt))
3448 {
3449 /* No need to copy into a constant-pool, it comes pre-initialized. */
3450 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3451 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3452 gsi, false, false, loc);
3453 if (access_has_children_p (lacc))
3454 {
3455 gimple_stmt_iterator alt_gsi = gsi_none ();
3456 if (stmt_ends_bb_p (stmt))
3457 {
3458 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3459 gsi = &alt_gsi;
3460 }
3461 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3462 gsi, true, true, loc);
3463 }
3464 sra_stats.separate_lhs_rhs_handling++;
3465
3466 /* This gimplification must be done after generate_subtree_copies,
3467 lest we insert the subtree copies in the middle of the gimplified
3468 sequence. */
3469 if (force_gimple_rhs)
3470 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3471 true, GSI_SAME_STMT);
3472 if (gimple_assign_rhs1 (stmt) != rhs)
3473 {
3474 modify_this_stmt = true;
3475 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3476 gcc_assert (stmt == gsi_stmt (orig_gsi));
3477 }
3478
3479 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3480 }
3481 else
3482 {
3483 if (access_has_children_p (lacc)
3484 && access_has_children_p (racc)
3485 /* When an access represents an unscalarizable region, it usually
3486 represents accesses with variable offset and thus must not be used
3487 to generate new memory accesses. */
3488 && !lacc->grp_unscalarizable_region
3489 && !racc->grp_unscalarizable_region)
3490 {
3491 struct subreplacement_assignment_data sad;
3492
3493 sad.left_offset = lacc->offset;
3494 sad.assignment_lhs = lhs;
3495 sad.assignment_rhs = rhs;
3496 sad.top_racc = racc;
3497 sad.old_gsi = *gsi;
3498 sad.new_gsi = gsi;
3499 sad.loc = gimple_location (stmt);
3500 sad.refreshed = SRA_UDH_NONE;
3501
3502 if (lacc->grp_read && !lacc->grp_covered)
3503 handle_unscalarized_data_in_subtree (&sad);
3504
3505 load_assign_lhs_subreplacements (lacc, &sad);
3506 if (sad.refreshed != SRA_UDH_RIGHT)
3507 {
3508 gsi_next (gsi);
3509 unlink_stmt_vdef (stmt);
3510 gsi_remove (&sad.old_gsi, true);
3511 release_defs (stmt);
3512 sra_stats.deleted++;
3513 return SRA_AM_REMOVED;
3514 }
3515 }
3516 else
3517 {
3518 if (access_has_children_p (racc)
3519 && !racc->grp_unscalarized_data
3520 && TREE_CODE (lhs) != SSA_NAME)
3521 {
3522 if (dump_file)
3523 {
3524 fprintf (dump_file, "Removing load: ");
3525 print_gimple_stmt (dump_file, stmt, 0, 0);
3526 }
3527 generate_subtree_copies (racc->first_child, lhs,
3528 racc->offset, 0, 0, gsi,
3529 false, false, loc);
3530 gcc_assert (stmt == gsi_stmt (*gsi));
3531 unlink_stmt_vdef (stmt);
3532 gsi_remove (gsi, true);
3533 release_defs (stmt);
3534 sra_stats.deleted++;
3535 return SRA_AM_REMOVED;
3536 }
3537 /* Restore the aggregate RHS from its components so the
3538 prevailing aggregate copy does the right thing. */
3539 if (access_has_children_p (racc))
3540 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3541 gsi, false, false, loc);
3542 /* Re-load the components of the aggregate copy destination.
3543 But use the RHS aggregate to load from to expose more
3544 optimization opportunities. */
3545 if (access_has_children_p (lacc))
3546 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3547 0, 0, gsi, true, true, loc);
3548 }
3549
3550 return SRA_AM_NONE;
3551 }
3552 }
3553
3554 /* Set any scalar replacements of values in the constant pool to the initial
3555 value of the constant. (Constant-pool decls like *.LC0 have effectively
3556 been initialized before the program starts; we must do the same for their
3557 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3558 the function's entry block. */
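/* Editorial illustration (a hedged sketch, not from the original sources):
   for a local aggregate initialized from the constant pool, e.g.

     void f (void)
     {
       int a[2] = { 1, 2 };   // often emitted as a copy from *.LC0
       ...
     }

   scalarizing the accesses to *.LC0 requires entry-block statements such as

     SR.1 = *.LC0[0];
     SR.2 = *.LC0[1];

   so that the replacements start out with the constant's values.  The
   replacement names are invented for the example.  */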
3559
3560 static void
3561 initialize_constant_pool_replacements (void)
3562 {
3563 gimple_seq seq = NULL;
3564 gimple_stmt_iterator gsi = gsi_start (seq);
3565 bitmap_iterator bi;
3566 unsigned i;
3567
3568 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3569 {
3570 tree var = candidate (i);
3571 if (!constant_decl_p (var))
3572 continue;
3573 vec<access_p> *access_vec = get_base_access_vector (var);
3574 if (!access_vec)
3575 continue;
3576 for (unsigned i = 0; i < access_vec->length (); i++)
3577 {
3578 struct access *access = (*access_vec)[i];
3579 if (!access->replacement_decl)
3580 continue;
3581 gassign *stmt
3582 = gimple_build_assign (get_access_replacement (access),
3583 unshare_expr (access->expr));
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3585 {
3586 fprintf (dump_file, "Generating constant initializer: ");
3587 print_gimple_stmt (dump_file, stmt, 0, 1);
3588 fprintf (dump_file, "\n");
3589 }
3590 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3591 update_stmt (stmt);
3592 }
3593 }
3594
3595 seq = gsi_seq (gsi);
3596 if (seq)
3597 gsi_insert_seq_on_edge_immediate (
3598 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3599 }
3600
3601 /* Traverse the function body and perform all modifications as decided in
3602 analyze_all_variable_accesses. Return true iff the CFG has been
3603 changed. */
3604
3605 static bool
3606 sra_modify_function_body (void)
3607 {
3608 bool cfg_changed = false;
3609 basic_block bb;
3610
3611 initialize_constant_pool_replacements ();
3612
3613 FOR_EACH_BB_FN (bb, cfun)
3614 {
3615 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3616 while (!gsi_end_p (gsi))
3617 {
3618 gimple *stmt = gsi_stmt (gsi);
3619 enum assignment_mod_result assign_result;
3620 bool modified = false, deleted = false;
3621 tree *t;
3622 unsigned i;
3623
3624 switch (gimple_code (stmt))
3625 {
3626 case GIMPLE_RETURN:
3627 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3628 if (*t != NULL_TREE)
3629 modified |= sra_modify_expr (t, &gsi, false);
3630 break;
3631
3632 case GIMPLE_ASSIGN:
3633 assign_result = sra_modify_assign (stmt, &gsi);
3634 modified |= assign_result == SRA_AM_MODIFIED;
3635 deleted = assign_result == SRA_AM_REMOVED;
3636 break;
3637
3638 case GIMPLE_CALL:
3639 /* Operands must be processed before the lhs. */
3640 for (i = 0; i < gimple_call_num_args (stmt); i++)
3641 {
3642 t = gimple_call_arg_ptr (stmt, i);
3643 modified |= sra_modify_expr (t, &gsi, false);
3644 }
3645
3646 if (gimple_call_lhs (stmt))
3647 {
3648 t = gimple_call_lhs_ptr (stmt);
3649 modified |= sra_modify_expr (t, &gsi, true);
3650 }
3651 break;
3652
3653 case GIMPLE_ASM:
3654 {
3655 gasm *asm_stmt = as_a <gasm *> (stmt);
3656 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3657 {
3658 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3659 modified |= sra_modify_expr (t, &gsi, false);
3660 }
3661 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3662 {
3663 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3664 modified |= sra_modify_expr (t, &gsi, true);
3665 }
3666 }
3667 break;
3668
3669 default:
3670 break;
3671 }
3672
3673 if (modified)
3674 {
3675 update_stmt (stmt);
3676 if (maybe_clean_eh_stmt (stmt)
3677 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3678 cfg_changed = true;
3679 }
3680 if (!deleted)
3681 gsi_next (&gsi);
3682 }
3683 }
3684
3685 gsi_commit_edge_inserts ();
3686 return cfg_changed;
3687 }
3688
3689 /* Generate statements initializing scalar replacements of parts of function
3690 parameters. */
3691
3692 static void
3693 initialize_parameter_reductions (void)
3694 {
3695 gimple_stmt_iterator gsi;
3696 gimple_seq seq = NULL;
3697 tree parm;
3698
3699 gsi = gsi_start (seq);
3700 for (parm = DECL_ARGUMENTS (current_function_decl);
3701 parm;
3702 parm = DECL_CHAIN (parm))
3703 {
3704 vec<access_p> *access_vec;
3705 struct access *access;
3706
3707 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3708 continue;
3709 access_vec = get_base_access_vector (parm);
3710 if (!access_vec)
3711 continue;
3712
3713 for (access = (*access_vec)[0];
3714 access;
3715 access = access->next_grp)
3716 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3717 EXPR_LOCATION (parm));
3718 }
3719
3720 seq = gsi_seq (gsi);
3721 if (seq)
3722 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3723 }
3724
3725 /* The "main" function of the intraprocedural SRA passes. Runs the analysis
3726 and, if it reveals there are components of some aggregates to be scalarized,
3727 runs the required transformations. */
3728 static unsigned int
3729 perform_intra_sra (void)
3730 {
3731 int ret = 0;
3732 sra_initialize ();
3733
3734 if (!find_var_candidates ())
3735 goto out;
3736
3737 if (!scan_function ())
3738 goto out;
3739
3740 if (!analyze_all_variable_accesses ())
3741 goto out;
3742
3743 if (sra_modify_function_body ())
3744 ret = TODO_update_ssa | TODO_cleanup_cfg;
3745 else
3746 ret = TODO_update_ssa;
3747 initialize_parameter_reductions ();
3748
3749 statistics_counter_event (cfun, "Scalar replacements created",
3750 sra_stats.replacements);
3751 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3752 statistics_counter_event (cfun, "Subtree copy stmts",
3753 sra_stats.subtree_copies);
3754 statistics_counter_event (cfun, "Subreplacement stmts",
3755 sra_stats.subreplacements);
3756 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3757 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3758 sra_stats.separate_lhs_rhs_handling);
3759
3760 out:
3761 sra_deinitialize ();
3762 return ret;
3763 }
3764
3765 /* Perform early intraprocedural SRA. */
3766 static unsigned int
3767 early_intra_sra (void)
3768 {
3769 sra_mode = SRA_MODE_EARLY_INTRA;
3770 return perform_intra_sra ();
3771 }
3772
3773 /* Perform "late" intraprocedural SRA. */
3774 static unsigned int
3775 late_intra_sra (void)
3776 {
3777 sra_mode = SRA_MODE_INTRA;
3778 return perform_intra_sra ();
3779 }
3780
3781
3782 static bool
3783 gate_intra_sra (void)
3784 {
3785 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3786 }
3787
3788
3789 namespace {
3790
3791 const pass_data pass_data_sra_early =
3792 {
3793 GIMPLE_PASS, /* type */
3794 "esra", /* name */
3795 OPTGROUP_NONE, /* optinfo_flags */
3796 TV_TREE_SRA, /* tv_id */
3797 ( PROP_cfg | PROP_ssa ), /* properties_required */
3798 0, /* properties_provided */
3799 0, /* properties_destroyed */
3800 0, /* todo_flags_start */
3801 TODO_update_ssa, /* todo_flags_finish */
3802 };
3803
3804 class pass_sra_early : public gimple_opt_pass
3805 {
3806 public:
3807 pass_sra_early (gcc::context *ctxt)
3808 : gimple_opt_pass (pass_data_sra_early, ctxt)
3809 {}
3810
3811 /* opt_pass methods: */
3812 virtual bool gate (function *) { return gate_intra_sra (); }
3813 virtual unsigned int execute (function *) { return early_intra_sra (); }
3814
3815 }; // class pass_sra_early
3816
3817 } // anon namespace
3818
3819 gimple_opt_pass *
3820 make_pass_sra_early (gcc::context *ctxt)
3821 {
3822 return new pass_sra_early (ctxt);
3823 }
3824
3825 namespace {
3826
3827 const pass_data pass_data_sra =
3828 {
3829 GIMPLE_PASS, /* type */
3830 "sra", /* name */
3831 OPTGROUP_NONE, /* optinfo_flags */
3832 TV_TREE_SRA, /* tv_id */
3833 ( PROP_cfg | PROP_ssa ), /* properties_required */
3834 0, /* properties_provided */
3835 0, /* properties_destroyed */
3836 TODO_update_address_taken, /* todo_flags_start */
3837 TODO_update_ssa, /* todo_flags_finish */
3838 };
3839
3840 class pass_sra : public gimple_opt_pass
3841 {
3842 public:
3843 pass_sra (gcc::context *ctxt)
3844 : gimple_opt_pass (pass_data_sra, ctxt)
3845 {}
3846
3847 /* opt_pass methods: */
3848 virtual bool gate (function *) { return gate_intra_sra (); }
3849 virtual unsigned int execute (function *) { return late_intra_sra (); }
3850
3851 }; // class pass_sra
3852
3853 } // anon namespace
3854
3855 gimple_opt_pass *
3856 make_pass_sra (gcc::context *ctxt)
3857 {
3858 return new pass_sra (ctxt);
3859 }
3860
3861
3862 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3863 parameter. */
3864
3865 static bool
3866 is_unused_scalar_param (tree parm)
3867 {
3868 tree name;
3869 return (is_gimple_reg (parm)
3870 && (!(name = ssa_default_def (cfun, parm))
3871 || has_zero_uses (name)));
3872 }
3873
3874 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3875 examine whether there are any direct or otherwise infeasible ones. If so,
3876 return true; otherwise return false. PARM must be a gimple register with a
3877 non-NULL default definition. */
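/* Editorial illustration (a hedged sketch, not from the original sources):

     int g (int *p) { return *p + 1; }             // only dereferences P
     int *escaped;
     int h (int *p) { escaped = p; return *p; }    // the pointer itself escapes

   in G every use of the default definition of P is a plain dereference, so
   this function returns false and P stays a candidate; in H the store
   "escaped = p" is a direct use and disqualifies P.  The function and
   variable names are invented for the example.  */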
3878
3879 static bool
3880 ptr_parm_has_direct_uses (tree parm)
3881 {
3882 imm_use_iterator ui;
3883 gimple *stmt;
3884 tree name = ssa_default_def (cfun, parm);
3885 bool ret = false;
3886
3887 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3888 {
3889 int uses_ok = 0;
3890 use_operand_p use_p;
3891
3892 if (is_gimple_debug (stmt))
3893 continue;
3894
3895 /* Valid uses include dereferences on the lhs and the rhs. */
3896 if (gimple_has_lhs (stmt))
3897 {
3898 tree lhs = gimple_get_lhs (stmt);
3899 while (handled_component_p (lhs))
3900 lhs = TREE_OPERAND (lhs, 0);
3901 if (TREE_CODE (lhs) == MEM_REF
3902 && TREE_OPERAND (lhs, 0) == name
3903 && integer_zerop (TREE_OPERAND (lhs, 1))
3904 && types_compatible_p (TREE_TYPE (lhs),
3905 TREE_TYPE (TREE_TYPE (name)))
3906 && !TREE_THIS_VOLATILE (lhs))
3907 uses_ok++;
3908 }
3909 if (gimple_assign_single_p (stmt))
3910 {
3911 tree rhs = gimple_assign_rhs1 (stmt);
3912 while (handled_component_p (rhs))
3913 rhs = TREE_OPERAND (rhs, 0);
3914 if (TREE_CODE (rhs) == MEM_REF
3915 && TREE_OPERAND (rhs, 0) == name
3916 && integer_zerop (TREE_OPERAND (rhs, 1))
3917 && types_compatible_p (TREE_TYPE (rhs),
3918 TREE_TYPE (TREE_TYPE (name)))
3919 && !TREE_THIS_VOLATILE (rhs))
3920 uses_ok++;
3921 }
3922 else if (is_gimple_call (stmt))
3923 {
3924 unsigned i;
3925 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3926 {
3927 tree arg = gimple_call_arg (stmt, i);
3928 while (handled_component_p (arg))
3929 arg = TREE_OPERAND (arg, 0);
3930 if (TREE_CODE (arg) == MEM_REF
3931 && TREE_OPERAND (arg, 0) == name
3932 && integer_zerop (TREE_OPERAND (arg, 1))
3933 && types_compatible_p (TREE_TYPE (arg),
3934 TREE_TYPE (TREE_TYPE (name)))
3935 && !TREE_THIS_VOLATILE (arg))
3936 uses_ok++;
3937 }
3938 }
3939
3940 /* If the number of valid uses does not match the number of
3941 uses in this stmt there is an unhandled use. */
3942 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3943 --uses_ok;
3944
3945 if (uses_ok != 0)
3946 ret = true;
3947
3948 if (ret)
3949 BREAK_FROM_IMM_USE_STMT (ui);
3950 }
3951
3952 return ret;
3953 }
3954
3955 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3956 them in candidate_bitmap. Note that these do not necessarily include
3957 parameters which are unused and thus can be removed. Return true iff any
3958 such candidate has been found. */
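/* Editorial illustration (a hedged sketch, not from the original sources):
   in a declaration such as

     int f (struct big s, struct big *p, volatile struct big v, int unused);

   S (an aggregate passed by value) and P (a pointer to an aggregate without
   direct uses) can become candidates, V is rejected because it is volatile,
   and UNUSED is merely noted as removable without becoming a candidate.
   The names are invented for the example.  */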
3959
3960 static bool
3961 find_param_candidates (void)
3962 {
3963 tree parm;
3964 int count = 0;
3965 bool ret = false;
3966 const char *msg;
3967
3968 for (parm = DECL_ARGUMENTS (current_function_decl);
3969 parm;
3970 parm = DECL_CHAIN (parm))
3971 {
3972 tree type = TREE_TYPE (parm);
3973 tree_node **slot;
3974
3975 count++;
3976
3977 if (TREE_THIS_VOLATILE (parm)
3978 || TREE_ADDRESSABLE (parm)
3979 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3980 continue;
3981
3982 if (is_unused_scalar_param (parm))
3983 {
3984 ret = true;
3985 continue;
3986 }
3987
3988 if (POINTER_TYPE_P (type))
3989 {
3990 type = TREE_TYPE (type);
3991
3992 if (TREE_CODE (type) == FUNCTION_TYPE
3993 || TYPE_VOLATILE (type)
3994 || (TREE_CODE (type) == ARRAY_TYPE
3995 && TYPE_NONALIASED_COMPONENT (type))
3996 || !is_gimple_reg (parm)
3997 || is_va_list_type (type)
3998 || ptr_parm_has_direct_uses (parm))
3999 continue;
4000 }
4001 else if (!AGGREGATE_TYPE_P (type))
4002 continue;
4003
4004 if (!COMPLETE_TYPE_P (type)
4005 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4006 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4007 || (AGGREGATE_TYPE_P (type)
4008 && type_internals_preclude_sra_p (type, &msg)))
4009 continue;
4010
4011 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4012 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4013 *slot = parm;
4014
4015 ret = true;
4016 if (dump_file && (dump_flags & TDF_DETAILS))
4017 {
4018 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4019 print_generic_expr (dump_file, parm, 0);
4020 fprintf (dump_file, "\n");
4021 }
4022 }
4023
4024 func_param_count = count;
4025 return ret;
4026 }
4027
4028 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4029 maybe_modified. */
4030
4031 static bool
4032 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4033 void *data)
4034 {
4035 struct access *repr = (struct access *) data;
4036
4037 repr->grp_maybe_modified = 1;
4038 return true;
4039 }
4040
4041 /* Analyze what representatives (in linked lists accessible from
4042 REPRESENTATIVES) can be modified by side effects of statements in the
4043 current function. */
4044
4045 static void
4046 analyze_modified_params (vec<access_p> representatives)
4047 {
4048 int i;
4049
4050 for (i = 0; i < func_param_count; i++)
4051 {
4052 struct access *repr;
4053
4054 for (repr = representatives[i];
4055 repr;
4056 repr = repr->next_grp)
4057 {
4058 struct access *access;
4059 bitmap visited;
4060 ao_ref ar;
4061
4062 if (no_accesses_p (repr))
4063 continue;
4064 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4065 || repr->grp_maybe_modified)
4066 continue;
4067
4068 ao_ref_init (&ar, repr->expr);
4069 visited = BITMAP_ALLOC (NULL);
4070 for (access = repr; access; access = access->next_sibling)
4071 {
4072 /* All accesses are read ones, otherwise grp_maybe_modified would
4073 be trivially set. */
4074 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4075 mark_maybe_modified, repr, &visited);
4076 if (repr->grp_maybe_modified)
4077 break;
4078 }
4079 BITMAP_FREE (visited);
4080 }
4081 }
4082 }
4083
4084 /* Propagate distances in bb_dereferences in the opposite direction than the
4085 control flow edges, in each step storing the maximum of the current value
4086 and the minimum of all successors. These steps are repeated until the table
4087 stabilizes. Note that BBs which might terminate the function (according to
4088 the final_bbs bitmap) are never updated in this way. */
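/* Editorial illustration (a hedged sketch, not from the original sources):
   in a diamond-shaped CFG

        BB1
       /   \
     BB2   BB3
       \   /
        BB4

   where the recorded dereference distance of a parameter is 8 in BB2, 4 in
   BB3 and 16 in BB4, BB2 and BB3 first become max (own, 16) = 16, and BB1
   then becomes max (own, min (16, 16)) = 16, i.e. the distance that is
   certainly dereferenced on every path from the entry.  The numbers are
   invented for the example.  */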
4089
4090 static void
4091 propagate_dereference_distances (void)
4092 {
4093 basic_block bb;
4094
4095 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4096 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4097 FOR_EACH_BB_FN (bb, cfun)
4098 {
4099 queue.quick_push (bb);
4100 bb->aux = bb;
4101 }
4102
4103 while (!queue.is_empty ())
4104 {
4105 edge_iterator ei;
4106 edge e;
4107 bool change = false;
4108 int i;
4109
4110 bb = queue.pop ();
4111 bb->aux = NULL;
4112
4113 if (bitmap_bit_p (final_bbs, bb->index))
4114 continue;
4115
4116 for (i = 0; i < func_param_count; i++)
4117 {
4118 int idx = bb->index * func_param_count + i;
4119 bool first = true;
4120 HOST_WIDE_INT inh = 0;
4121
4122 FOR_EACH_EDGE (e, ei, bb->succs)
4123 {
4124 int succ_idx = e->dest->index * func_param_count + i;
4125
4126 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4127 continue;
4128
4129 if (first)
4130 {
4131 first = false;
4132 inh = bb_dereferences [succ_idx];
4133 }
4134 else if (bb_dereferences [succ_idx] < inh)
4135 inh = bb_dereferences [succ_idx];
4136 }
4137
4138 if (!first && bb_dereferences[idx] < inh)
4139 {
4140 bb_dereferences[idx] = inh;
4141 change = true;
4142 }
4143 }
4144
4145 if (change && !bitmap_bit_p (final_bbs, bb->index))
4146 FOR_EACH_EDGE (e, ei, bb->preds)
4147 {
4148 if (e->src->aux)
4149 continue;
4150
4151 e->src->aux = e->src;
4152 queue.quick_push (e->src);
4153 }
4154 }
4155 }
4156
4157 /* Dump a dereferences TABLE with heading STR to file F. */
4158
4159 static void
4160 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4161 {
4162 basic_block bb;
4163
4164 fprintf (f, "%s", str);
4165 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4166 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4167 {
4168 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4169 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4170 {
4171 int i;
4172 for (i = 0; i < func_param_count; i++)
4173 {
4174 int idx = bb->index * func_param_count + i;
4175 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4176 }
4177 }
4178 fprintf (f, "\n");
4179 }
4180 fprintf (f, "\n");
4181 }
4182
4183 /* Determine which (parts of) parameters passed by reference and not
4184 assigned to are not certainly dereferenced in this function, so that the
4185 dereferencing cannot be safely moved to the caller without potentially
4186 introducing a segfault. Mark such REPRESENTATIVES as
4187 grp_not_necessarilly_dereferenced.
4188
4189 The maximum dereferenced "distance," i.e. the offset + size of the accessed
4190 part, is calculated for each pointer parameter rather than a simple
4191 boolean, in order to handle cases when only a fraction of the whole
4192 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4193 an example).
4194
4195 The maximum dereference distances for each pointer parameter and BB are
4196 already stored in bb_dereferences. This routine simply propagates these
4197 values upwards by propagate_dereference_distances and then compares the
4198 distances of individual parameters in the ENTRY BB to the equivalent
4199 distances of each representative of a (fraction of a) parameter. */
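/* Editorial illustration (a hedged sketch, not from the original sources):
   if the propagated distance for a by-reference parameter in the ENTRY block
   is 64 bits, a representative covering bits [0, 32) satisfies
   offset + size <= 64 and is considered certainly dereferenced, while a
   representative covering bits [64, 96) is only conditionally dereferenced
   and is marked grp_not_necessarilly_dereferenced, which later keeps it
   passed by reference.  The numbers are invented for the example.  */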
4200
4201 static void
4202 analyze_caller_dereference_legality (vec<access_p> representatives)
4203 {
4204 int i;
4205
4206 if (dump_file && (dump_flags & TDF_DETAILS))
4207 dump_dereferences_table (dump_file,
4208 "Dereference table before propagation:\n",
4209 bb_dereferences);
4210
4211 propagate_dereference_distances ();
4212
4213 if (dump_file && (dump_flags & TDF_DETAILS))
4214 dump_dereferences_table (dump_file,
4215 "Dereference table after propagation:\n",
4216 bb_dereferences);
4217
4218 for (i = 0; i < func_param_count; i++)
4219 {
4220 struct access *repr = representatives[i];
4221 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4222
4223 if (!repr || no_accesses_p (repr))
4224 continue;
4225
4226 do
4227 {
4228 if ((repr->offset + repr->size) > bb_dereferences[idx])
4229 repr->grp_not_necessarilly_dereferenced = 1;
4230 repr = repr->next_grp;
4231 }
4232 while (repr);
4233 }
4234 }
4235
4236 /* Return the representative access for the parameter declaration PARM if it is
4237 a scalar passed by reference which is not written to and the pointer value
4238 is not used directly. Thus, if it is legal to dereference it in the caller
4239 and we can rule out modifications through aliases, such a parameter should be
4240 turned into one passed by value. Return NULL otherwise. */
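/* Editorial illustration (a hedged sketch, not from the original sources):

     int f (const int *p) { return *p + *p; }

   only reads the pointed-to scalar, so if aliases cannot modify it between
   the call and the reads, IPA-SRA may change the signature to take the
   scalar by value, roughly "int f (int p)", letting the caller do the single
   dereference.  The name F is invented for the example.  */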
4241
4242 static struct access *
4243 unmodified_by_ref_scalar_representative (tree parm)
4244 {
4245 int i, access_count;
4246 struct access *repr;
4247 vec<access_p> *access_vec;
4248
4249 access_vec = get_base_access_vector (parm);
4250 gcc_assert (access_vec);
4251 repr = (*access_vec)[0];
4252 if (repr->write)
4253 return NULL;
4254 repr->group_representative = repr;
4255
4256 access_count = access_vec->length ();
4257 for (i = 1; i < access_count; i++)
4258 {
4259 struct access *access = (*access_vec)[i];
4260 if (access->write)
4261 return NULL;
4262 access->group_representative = repr;
4263 access->next_sibling = repr->next_sibling;
4264 repr->next_sibling = access;
4265 }
4266
4267 repr->grp_read = 1;
4268 repr->grp_scalar_ptr = 1;
4269 return repr;
4270 }
4271
4272 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4273 associated with. REQ_ALIGN is the minimum required alignment. */
4274
4275 static bool
4276 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4277 {
4278 unsigned int exp_align;
4279 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4280 is an incompatible assignment in a call statement (and possibly even in asm
4281 statements). This can be relaxed by using a new temporary but only for
4282 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4283 intraprocedural SRA we deal with this by keeping the old aggregate around,
4284 something we cannot do in IPA-SRA.) */
4285 if (access->write
4286 && (is_gimple_call (access->stmt)
4287 || gimple_code (access->stmt) == GIMPLE_ASM))
4288 return true;
4289
4290 exp_align = get_object_alignment (access->expr);
4291 if (exp_align < req_align)
4292 return true;
4293
4294 return false;
4295 }
4296
4297
4298 /* Sort collected accesses for parameter PARM, identify representatives for
4299 each accessed region and link them together. Return NULL if there are
4300 different but overlapping accesses; return the special pointer value meaning
4301 there are no accesses for this parameter if that is the case; and return the
4302 first representative otherwise. Set *RO_GRP if there is a group of accesses
4303 with only read (i.e. no write) accesses. */
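/* Editorial illustration (a hedged sketch, not from the original sources):
   for a parameter of type

     struct S { int a; int b; long c; };

   separate accesses to the A, B and C fields occupy distinct offsets and
   each become a group representative, whereas an additional access that
   covers the whole of S overlaps them and makes this function return NULL,
   disqualifying the parameter.  The type is invented for the example.  */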
4304
4305 static struct access *
4306 splice_param_accesses (tree parm, bool *ro_grp)
4307 {
4308 int i, j, access_count, group_count;
4309 int agg_size, total_size = 0;
4310 struct access *access, *res, **prev_acc_ptr = &res;
4311 vec<access_p> *access_vec;
4312
4313 access_vec = get_base_access_vector (parm);
4314 if (!access_vec)
4315 return &no_accesses_representant;
4316 access_count = access_vec->length ();
4317
4318 access_vec->qsort (compare_access_positions);
4319
4320 i = 0;
4321 total_size = 0;
4322 group_count = 0;
4323 while (i < access_count)
4324 {
4325 bool modification;
4326 tree a1_alias_type;
4327 access = (*access_vec)[i];
4328 modification = access->write;
4329 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4330 return NULL;
4331 a1_alias_type = reference_alias_ptr_type (access->expr);
4332
4333 /* Access is about to become group representative unless we find some
4334 nasty overlap which would preclude us from breaking this parameter
4335 apart. */
4336
4337 j = i + 1;
4338 while (j < access_count)
4339 {
4340 struct access *ac2 = (*access_vec)[j];
4341 if (ac2->offset != access->offset)
4342 {
4343 /* All or nothing law for parameters. */
4344 if (access->offset + access->size > ac2->offset)
4345 return NULL;
4346 else
4347 break;
4348 }
4349 else if (ac2->size != access->size)
4350 return NULL;
4351
4352 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4353 || (ac2->type != access->type
4354 && (TREE_ADDRESSABLE (ac2->type)
4355 || TREE_ADDRESSABLE (access->type)))
4356 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4357 return NULL;
4358
4359 modification |= ac2->write;
4360 ac2->group_representative = access;
4361 ac2->next_sibling = access->next_sibling;
4362 access->next_sibling = ac2;
4363 j++;
4364 }
4365
4366 group_count++;
4367 access->grp_maybe_modified = modification;
4368 if (!modification)
4369 *ro_grp = true;
4370 *prev_acc_ptr = access;
4371 prev_acc_ptr = &access->next_grp;
4372 total_size += access->size;
4373 i = j;
4374 }
4375
4376 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4377 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4378 else
4379 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4380 if (total_size >= agg_size)
4381 return NULL;
4382
4383 gcc_assert (group_count > 0);
4384 return res;
4385 }
4386
4387 /* Decide whether parameters with representative accesses given by REPR should
4388 be reduced into components. */
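/* Editorial illustration (a hedged sketch, not from the original sources):
   for a 64-bit pointer parameter to a 128-bit struct of which only two
   32-bit fields are read, total_size is 64; this is below the aggregate size
   (128) and below cur_parm_size * PARAM_IPA_SRA_PTR_GROWTH_FACTOR (128 with
   the assumed default factor of 2), so the parameter is split into two
   components.  The numbers are invented for the example.  */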
4389
4390 static int
4391 decide_one_param_reduction (struct access *repr)
4392 {
4393 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4394 bool by_ref;
4395 tree parm;
4396
4397 parm = repr->base;
4398 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4399 gcc_assert (cur_parm_size > 0);
4400
4401 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4402 {
4403 by_ref = true;
4404 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4405 }
4406 else
4407 {
4408 by_ref = false;
4409 agg_size = cur_parm_size;
4410 }
4411
4412 if (dump_file)
4413 {
4414 struct access *acc;
4415 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4416 print_generic_expr (dump_file, parm, 0);
4417 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4418 for (acc = repr; acc; acc = acc->next_grp)
4419 dump_access (dump_file, acc, true);
4420 }
4421
4422 total_size = 0;
4423 new_param_count = 0;
4424
4425 for (; repr; repr = repr->next_grp)
4426 {
4427 gcc_assert (parm == repr->base);
4428
4429 /* Taking the address of a non-addressable field is verboten. */
4430 if (by_ref && repr->non_addressable)
4431 return 0;
4432
4433 /* Do not decompose a non-BLKmode param in a way that would
4434 create BLKmode params. Especially for by-reference passing
4435 (thus, pointer-type param) this is hardly worthwhile. */
4436 if (DECL_MODE (parm) != BLKmode
4437 && TYPE_MODE (repr->type) == BLKmode)
4438 return 0;
4439
4440 if (!by_ref || (!repr->grp_maybe_modified
4441 && !repr->grp_not_necessarilly_dereferenced))
4442 total_size += repr->size;
4443 else
4444 total_size += cur_parm_size;
4445
4446 new_param_count++;
4447 }
4448
4449 gcc_assert (new_param_count > 0);
4450
4451 if (optimize_function_for_size_p (cfun))
4452 parm_size_limit = cur_parm_size;
4453 else
4454 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4455 * cur_parm_size);
4456
4457 if (total_size < agg_size
4458 && total_size <= parm_size_limit)
4459 {
4460 if (dump_file)
4461 fprintf (dump_file, " ....will be split into %i components\n",
4462 new_param_count);
4463 return new_param_count;
4464 }
4465 else
4466 return 0;
4467 }
4468
4469 /* The order of the following enums is important; we need to do extra work for
4470 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4471 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4472 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4473
4474 /* Identify representatives of all accesses to all candidate parameters for
4475 IPA-SRA. Return result based on what representatives have been found. */
4476
4477 static enum ipa_splicing_result
4478 splice_all_param_accesses (vec<access_p> &representatives)
4479 {
4480 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4481 tree parm;
4482 struct access *repr;
4483
4484 representatives.create (func_param_count);
4485
4486 for (parm = DECL_ARGUMENTS (current_function_decl);
4487 parm;
4488 parm = DECL_CHAIN (parm))
4489 {
4490 if (is_unused_scalar_param (parm))
4491 {
4492 representatives.quick_push (&no_accesses_representant);
4493 if (result == NO_GOOD_ACCESS)
4494 result = UNUSED_PARAMS;
4495 }
4496 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4497 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4498 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4499 {
4500 repr = unmodified_by_ref_scalar_representative (parm);
4501 representatives.quick_push (repr);
4502 if (repr)
4503 result = UNMODIF_BY_REF_ACCESSES;
4504 }
4505 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4506 {
4507 bool ro_grp = false;
4508 repr = splice_param_accesses (parm, &ro_grp);
4509 representatives.quick_push (repr);
4510
4511 if (repr && !no_accesses_p (repr))
4512 {
4513 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4514 {
4515 if (ro_grp)
4516 result = UNMODIF_BY_REF_ACCESSES;
4517 else if (result < MODIF_BY_REF_ACCESSES)
4518 result = MODIF_BY_REF_ACCESSES;
4519 }
4520 else if (result < BY_VAL_ACCESSES)
4521 result = BY_VAL_ACCESSES;
4522 }
4523 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4524 result = UNUSED_PARAMS;
4525 }
4526 else
4527 representatives.quick_push (NULL);
4528 }
4529
4530 if (result == NO_GOOD_ACCESS)
4531 {
4532 representatives.release ();
4533 return NO_GOOD_ACCESS;
4534 }
4535
4536 return result;
4537 }
4538
4539 /* Return the index of BASE in PARMS. Abort if it is not found. */
4540
4541 static inline int
4542 get_param_index (tree base, vec<tree> parms)
4543 {
4544 int i, len;
4545
4546 len = parms.length ();
4547 for (i = 0; i < len; i++)
4548 if (parms[i] == base)
4549 return i;
4550 gcc_unreachable ();
4551 }
4552
4553 /* Convert the decisions made at the representative level into compact
4554 parameter adjustments. REPRESENTATIVES are pointers to first
4555 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4556 final number of adjustments. */
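/* Editorial illustration (a hedged sketch, not from the original sources):
   for

     int f (struct S *p, int unused);   // S holding an int and a float

   with P split into both fields and UNUSED removed, the vector roughly
   contains { {base_index 0, type int}, {base_index 0, type float},
   {base_index 1, IPA_PARM_OP_REMOVE} }, from which the new declaration
   "int f (int, float)" is later built.  The names and layout are invented
   for the example.  */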
4557
4558 static ipa_parm_adjustment_vec
4559 turn_representatives_into_adjustments (vec<access_p> representatives,
4560 int adjustments_count)
4561 {
4562 vec<tree> parms;
4563 ipa_parm_adjustment_vec adjustments;
4564 tree parm;
4565 int i;
4566
4567 gcc_assert (adjustments_count > 0);
4568 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4569 adjustments.create (adjustments_count);
4570 parm = DECL_ARGUMENTS (current_function_decl);
4571 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4572 {
4573 struct access *repr = representatives[i];
4574
4575 if (!repr || no_accesses_p (repr))
4576 {
4577 struct ipa_parm_adjustment adj;
4578
4579 memset (&adj, 0, sizeof (adj));
4580 adj.base_index = get_param_index (parm, parms);
4581 adj.base = parm;
4582 if (!repr)
4583 adj.op = IPA_PARM_OP_COPY;
4584 else
4585 adj.op = IPA_PARM_OP_REMOVE;
4586 adj.arg_prefix = "ISRA";
4587 adjustments.quick_push (adj);
4588 }
4589 else
4590 {
4591 struct ipa_parm_adjustment adj;
4592 int index = get_param_index (parm, parms);
4593
4594 for (; repr; repr = repr->next_grp)
4595 {
4596 memset (&adj, 0, sizeof (adj));
4597 gcc_assert (repr->base == parm);
4598 adj.base_index = index;
4599 adj.base = repr->base;
4600 adj.type = repr->type;
4601 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4602 adj.offset = repr->offset;
4603 adj.reverse = repr->reverse;
4604 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4605 && (repr->grp_maybe_modified
4606 || repr->grp_not_necessarilly_dereferenced));
4607 adj.arg_prefix = "ISRA";
4608 adjustments.quick_push (adj);
4609 }
4610 }
4611 }
4612 parms.release ();
4613 return adjustments;
4614 }
4615
4616 /* Analyze the collected accesses and produce a plan for what to do with the
4617 parameters in the form of adjustments, NULL meaning nothing is to be done. */
4618
4619 static ipa_parm_adjustment_vec
4620 analyze_all_param_acesses (void)
4621 {
4622 enum ipa_splicing_result repr_state;
4623 bool proceed = false;
4624 int i, adjustments_count = 0;
4625 vec<access_p> representatives;
4626 ipa_parm_adjustment_vec adjustments;
4627
4628 repr_state = splice_all_param_accesses (representatives);
4629 if (repr_state == NO_GOOD_ACCESS)
4630 return ipa_parm_adjustment_vec ();
4631
4632 /* If there are any parameters passed by reference which are not modified
4633 directly, we need to check whether they can be modified indirectly. */
4634 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4635 {
4636 analyze_caller_dereference_legality (representatives);
4637 analyze_modified_params (representatives);
4638 }
4639
4640 for (i = 0; i < func_param_count; i++)
4641 {
4642 struct access *repr = representatives[i];
4643
4644 if (repr && !no_accesses_p (repr))
4645 {
4646 if (repr->grp_scalar_ptr)
4647 {
4648 adjustments_count++;
4649 if (repr->grp_not_necessarilly_dereferenced
4650 || repr->grp_maybe_modified)
4651 representatives[i] = NULL;
4652 else
4653 {
4654 proceed = true;
4655 sra_stats.scalar_by_ref_to_by_val++;
4656 }
4657 }
4658 else
4659 {
4660 int new_components = decide_one_param_reduction (repr);
4661
4662 if (new_components == 0)
4663 {
4664 representatives[i] = NULL;
4665 adjustments_count++;
4666 }
4667 else
4668 {
4669 adjustments_count += new_components;
4670 sra_stats.aggregate_params_reduced++;
4671 sra_stats.param_reductions_created += new_components;
4672 proceed = true;
4673 }
4674 }
4675 }
4676 else
4677 {
4678 if (no_accesses_p (repr))
4679 {
4680 proceed = true;
4681 sra_stats.deleted_unused_parameters++;
4682 }
4683 adjustments_count++;
4684 }
4685 }
4686
4687 if (!proceed && dump_file)
4688 fprintf (dump_file, "NOT proceeding to change params.\n");
4689
4690 if (proceed)
4691 adjustments = turn_representatives_into_adjustments (representatives,
4692 adjustments_count);
4693 else
4694 adjustments = ipa_parm_adjustment_vec ();
4695
4696 representatives.release ();
4697 return adjustments;
4698 }
4699
4700 /* If a parameter replacement identified by ADJ does not yet exist in the form
4701 of declaration, create it and record it, otherwise return the previously
4702 created one. */
4703
4704 static tree
4705 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4706 {
4707 tree repl;
4708 if (!adj->new_ssa_base)
4709 {
4710 char *pretty_name = make_fancy_name (adj->base);
4711
4712 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4713 DECL_NAME (repl) = get_identifier (pretty_name);
4714 DECL_NAMELESS (repl) = 1;
4715 obstack_free (&name_obstack, pretty_name);
4716
4717 adj->new_ssa_base = repl;
4718 }
4719 else
4720 repl = adj->new_ssa_base;
4721 return repl;
4722 }
4723
4724 /* Find the first adjustment for a particular parameter BASE in a vector of
4725 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4726 adjustment. */
4727
4728 static struct ipa_parm_adjustment *
4729 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4730 {
4731 int i, len;
4732
4733 len = adjustments.length ();
4734 for (i = 0; i < len; i++)
4735 {
4736 struct ipa_parm_adjustment *adj;
4737
4738 adj = &adjustments[i];
4739 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4740 return adj;
4741 }
4742
4743 return NULL;
4744 }
4745
4746 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4747 parameter which is to be removed because its value is not used, create a new
4748 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4749 original with it and return it. If there is no need to re-map, return NULL.
4750 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4751
4752 static tree
4753 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4754 ipa_parm_adjustment_vec adjustments)
4755 {
4756 struct ipa_parm_adjustment *adj;
4757 tree decl, repl, new_name;
4758
4759 if (TREE_CODE (old_name) != SSA_NAME)
4760 return NULL;
4761
4762 decl = SSA_NAME_VAR (old_name);
4763 if (decl == NULL_TREE
4764 || TREE_CODE (decl) != PARM_DECL)
4765 return NULL;
4766
4767 adj = get_adjustment_for_base (adjustments, decl);
4768 if (!adj)
4769 return NULL;
4770
4771 repl = get_replaced_param_substitute (adj);
4772 new_name = make_ssa_name (repl, stmt);
4773 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4774 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4775
4776 if (dump_file)
4777 {
4778 fprintf (dump_file, "replacing an SSA name of a removed param ");
4779 print_generic_expr (dump_file, old_name, 0);
4780 fprintf (dump_file, " with ");
4781 print_generic_expr (dump_file, new_name, 0);
4782 fprintf (dump_file, "\n");
4783 }
4784
4785 replace_uses_by (old_name, new_name);
4786 return new_name;
4787 }
4788
4789 /* If the statement STMT contains any expressions that need to be replaced with a
4790 different one as noted by ADJUSTMENTS, do so. Handle any potential type
4791 incompatibilities (GSI is used to accommodate conversion statements and must
4792 point to the statement). Return true iff the statement was modified. */
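/* Editorial illustration (a hedged sketch, not from the original sources):
   if rewriting turns an aggregate clearing such as

     *p_3 = {};

   into an assignment whose LHS now has a scalar register type, the
   CONSTRUCTOR on the RHS is replaced by a zero constant of the LHS type,
   because wrapping a CONSTRUCTOR in a VIEW_CONVERT_EXPR can cause trouble
   (PR 42714).  The SSA name is invented for the example.  */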
4793
4794 static bool
4795 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4796 ipa_parm_adjustment_vec adjustments)
4797 {
4798 tree *lhs_p, *rhs_p;
4799 bool any;
4800
4801 if (!gimple_assign_single_p (stmt))
4802 return false;
4803
4804 rhs_p = gimple_assign_rhs1_ptr (stmt);
4805 lhs_p = gimple_assign_lhs_ptr (stmt);
4806
4807 any = ipa_modify_expr (rhs_p, false, adjustments);
4808 any |= ipa_modify_expr (lhs_p, false, adjustments);
4809 if (any)
4810 {
4811 tree new_rhs = NULL_TREE;
4812
4813 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4814 {
4815 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4816 {
4817 /* V_C_Es of constructors can cause trouble (PR 42714). */
4818 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4819 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4820 else
4821 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4822 NULL);
4823 }
4824 else
4825 new_rhs = fold_build1_loc (gimple_location (stmt),
4826 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4827 *rhs_p);
4828 }
4829 else if (REFERENCE_CLASS_P (*rhs_p)
4830 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4831 && !is_gimple_reg (*lhs_p))
4832 /* This can happen when an assignment in between two single field
4833 structures is turned into an assignment in between two pointers to
4834 scalars (PR 42237). */
4835 new_rhs = *rhs_p;
4836
4837 if (new_rhs)
4838 {
4839 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4840 true, GSI_SAME_STMT);
4841
4842 gimple_assign_set_rhs_from_tree (gsi, tmp);
4843 }
4844
4845 return true;
4846 }
4847
4848 return false;
4849 }
4850
4851 /* Traverse the function body and perform all modifications as described in
4852 ADJUSTMENTS. Return true iff the CFG has been changed. */
4853
4854 bool
4855 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4856 {
4857 bool cfg_changed = false;
4858 basic_block bb;
4859
4860 FOR_EACH_BB_FN (bb, cfun)
4861 {
4862 gimple_stmt_iterator gsi;
4863
4864 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4865 {
4866 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4867 tree new_lhs, old_lhs = gimple_phi_result (phi);
4868 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4869 if (new_lhs)
4870 {
4871 gimple_phi_set_result (phi, new_lhs);
4872 release_ssa_name (old_lhs);
4873 }
4874 }
4875
4876 gsi = gsi_start_bb (bb);
4877 while (!gsi_end_p (gsi))
4878 {
4879 gimple *stmt = gsi_stmt (gsi);
4880 bool modified = false;
4881 tree *t;
4882 unsigned i;
4883
4884 switch (gimple_code (stmt))
4885 {
4886 case GIMPLE_RETURN:
4887 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4888 if (*t != NULL_TREE)
4889 modified |= ipa_modify_expr (t, true, adjustments);
4890 break;
4891
4892 case GIMPLE_ASSIGN:
4893 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4894 break;
4895
4896 case GIMPLE_CALL:
4897 /* Operands must be processed before the lhs. */
4898 for (i = 0; i < gimple_call_num_args (stmt); i++)
4899 {
4900 t = gimple_call_arg_ptr (stmt, i);
4901 modified |= ipa_modify_expr (t, true, adjustments);
4902 }
4903
4904 if (gimple_call_lhs (stmt))
4905 {
4906 t = gimple_call_lhs_ptr (stmt);
4907 modified |= ipa_modify_expr (t, false, adjustments);
4908 }
4909 break;
4910
4911 case GIMPLE_ASM:
4912 {
4913 gasm *asm_stmt = as_a <gasm *> (stmt);
4914 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4915 {
4916 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4917 modified |= ipa_modify_expr (t, true, adjustments);
4918 }
4919 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4920 {
4921 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4922 modified |= ipa_modify_expr (t, false, adjustments);
4923 }
4924 }
4925 break;
4926
4927 default:
4928 break;
4929 }
4930
4931 def_operand_p defp;
4932 ssa_op_iter iter;
4933 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4934 {
4935 tree old_def = DEF_FROM_PTR (defp);
4936 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
4937 adjustments))
4938 {
4939 SET_DEF (defp, new_def);
4940 release_ssa_name (old_def);
4941 modified = true;
4942 }
4943 }
4944
4945 if (modified)
4946 {
4947 update_stmt (stmt);
4948 if (maybe_clean_eh_stmt (stmt)
4949 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4950 cfg_changed = true;
4951 }
4952 gsi_next (&gsi);
4953 }
4954 }
4955
4956 return cfg_changed;
4957 }
4958
4959 /* Call gimple_debug_bind_reset_value on all debug statements describing
4960 gimple register parameters that are being removed or replaced. */
4961
4962 static void
4963 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4964 {
4965 int i, len;
4966 gimple_stmt_iterator *gsip = NULL, gsi;
4967
4968 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4969 {
4970 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4971 gsip = &gsi;
4972 }
4973 len = adjustments.length ();
4974 for (i = 0; i < len; i++)
4975 {
4976 struct ipa_parm_adjustment *adj;
4977 imm_use_iterator ui;
4978 gimple *stmt;
4979 gdebug *def_temp;
4980 tree name, vexpr, copy = NULL_TREE;
4981 use_operand_p use_p;
4982
4983 adj = &adjustments[i];
4984 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4985 continue;
4986 name = ssa_default_def (cfun, adj->base);
4987 vexpr = NULL;
4988 if (name)
4989 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4990 {
4991 if (gimple_clobber_p (stmt))
4992 {
4993 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4994 unlink_stmt_vdef (stmt);
4995 gsi_remove (&cgsi, true);
4996 release_defs (stmt);
4997 continue;
4998 }
4999 /* All other users must have been removed by
5000 ipa_sra_modify_function_body. */
5001 gcc_assert (is_gimple_debug (stmt));
5002 if (vexpr == NULL && gsip != NULL)
5003 {
5004 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5005 vexpr = make_node (DEBUG_EXPR_DECL);
5006 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5007 NULL);
5008 DECL_ARTIFICIAL (vexpr) = 1;
5009 TREE_TYPE (vexpr) = TREE_TYPE (name);
5010 SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5011 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5012 }
5013 if (vexpr)
5014 {
5015 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5016 SET_USE (use_p, vexpr);
5017 }
5018 else
5019 gimple_debug_bind_reset_value (stmt);
5020 update_stmt (stmt);
5021 }
5022 /* Create a VAR_DECL for debug info purposes. */
5023 if (!DECL_IGNORED_P (adj->base))
5024 {
5025 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5026 VAR_DECL, DECL_NAME (adj->base),
5027 TREE_TYPE (adj->base));
5028 if (DECL_PT_UID_SET_P (adj->base))
5029 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5030 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5031 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5032 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5033 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5034 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5035 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5036 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5037 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5038 SET_DECL_RTL (copy, 0);
5039 TREE_USED (copy) = 1;
5040 DECL_CONTEXT (copy) = current_function_decl;
5041 add_local_decl (cfun, copy);
5042 DECL_CHAIN (copy) =
5043 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5044 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5045 }
5046 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5047 {
5048 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5049 if (vexpr)
5050 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5051 else
5052 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5053 NULL);
5054 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5055 }
5056 }
5057 }
5058
5059 /* Return false if all callers have at least as many actual arguments as there
5060 are formal parameters in the current function and their types match;
5061 return true otherwise. */
5062
5063 static bool
5064 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5065 void *data ATTRIBUTE_UNUSED)
5066 {
5067 struct cgraph_edge *cs;
5068 for (cs = node->callers; cs; cs = cs->next_caller)
5069 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5070 return true;
5071
5072 return false;
5073 }
5074
5075 /* Return false if all callers have a vuse attached to their call statements. */
5076
5077 static bool
5078 some_callers_have_no_vuse_p (struct cgraph_node *node,
5079 void *data ATTRIBUTE_UNUSED)
5080 {
5081 struct cgraph_edge *cs;
5082 for (cs = node->callers; cs; cs = cs->next_caller)
5083 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5084 return true;
5085
5086 return false;
5087 }
5088
5089 /* Convert all callers of NODE. */
5090
5091 static bool
5092 convert_callers_for_node (struct cgraph_node *node,
5093 void *data)
5094 {
5095 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5096 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5097 struct cgraph_edge *cs;
5098
5099 for (cs = node->callers; cs; cs = cs->next_caller)
5100 {
5101 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5102
5103 if (dump_file)
5104 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
5105 xstrdup_for_dump (cs->caller->name ()),
5106 cs->caller->order,
5107 xstrdup_for_dump (cs->callee->name ()),
5108 cs->callee->order);
5109
5110 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5111
5112 pop_cfun ();
5113 }
5114
5115 for (cs = node->callers; cs; cs = cs->next_caller)
5116 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5117 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5118 compute_inline_parameters (cs->caller, true);
5119 BITMAP_FREE (recomputed_callers);
5120
5121 return true;
5122 }
5123
5124 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5125
5126 static void
5127 convert_callers (struct cgraph_node *node, tree old_decl,
5128 ipa_parm_adjustment_vec adjustments)
5129 {
5130 basic_block this_block;
5131
5132 node->call_for_symbol_and_aliases (convert_callers_for_node,
5133 &adjustments, false);
5134
5135 if (!encountered_recursive_call)
5136 return;
5137
5138 FOR_EACH_BB_FN (this_block, cfun)
5139 {
5140 gimple_stmt_iterator gsi;
5141
5142 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5143 {
5144 gcall *stmt;
5145 tree call_fndecl;
5146 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5147 if (!stmt)
5148 continue;
5149 call_fndecl = gimple_call_fndecl (stmt);
5150 if (call_fndecl == old_decl)
5151 {
5152 if (dump_file)
5153 fprintf (dump_file, "Adjusting recursive call");
5154 gimple_call_set_fndecl (stmt, node->decl);
5155 ipa_modify_call_arguments (NULL, stmt, adjustments);
5156 }
5157 }
5158 }
5159
5160 return;
5161 }
5162
5163 /* Perform all the modification required in IPA-SRA for NODE to have parameters
5164 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5165
5166 static bool
5167 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5168 {
5169 struct cgraph_node *new_node;
5170 bool cfg_changed;
5171
5172 cgraph_edge::rebuild_edges ();
5173 free_dominance_info (CDI_DOMINATORS);
5174 pop_cfun ();
5175
5176 /* This must be done after rebuilding cgraph edges for node above.
5177 Otherwise any recursive calls to node that are recorded in
5178 redirect_callers will be corrupted. */
5179 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5180 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5181 NULL, false, NULL, NULL,
5182 "isra");
5183 redirect_callers.release ();
5184
5185 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5186 ipa_modify_formal_parameters (current_function_decl, adjustments);
5187 cfg_changed = ipa_sra_modify_function_body (adjustments);
5188 sra_ipa_reset_debug_stmts (adjustments);
5189 convert_callers (new_node, node->decl, adjustments);
5190 new_node->make_local ();
5191 return cfg_changed;
5192 }
5193
5194 /* Means of communication between ipa_sra_check_caller and
5195 ipa_sra_preliminary_function_checks. */
5196
5197 struct ipa_sra_check_caller_data
5198 {
5199 bool has_callers;
5200 bool bad_arg_alignment;
5201 bool has_thunk;
5202 };
5203
5204 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5205 ipa_sra_check_caller_data. Also check whether all aggregate arguments in
5206 all known calls are unit aligned and, if not, set the appropriate flag in
5207 DATA too. */
5208
5209 static bool
5210 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5211 {
5212 if (!node->callers)
5213 return false;
5214
5215 struct ipa_sra_check_caller_data *iscc;
5216 iscc = (struct ipa_sra_check_caller_data *) data;
5217 iscc->has_callers = true;
5218
5219 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5220 {
5221 if (cs->caller->thunk.thunk_p)
5222 {
5223 iscc->has_thunk = true;
5224 return true;
5225 }
5226 gimple *call_stmt = cs->call_stmt;
5227 unsigned count = gimple_call_num_args (call_stmt);
5228 for (unsigned i = 0; i < count; i++)
5229 {
5230 tree arg = gimple_call_arg (call_stmt, i);
5231 if (is_gimple_reg (arg))
5232 continue;
5233
5234 tree offset;
5235 HOST_WIDE_INT bitsize, bitpos;
5236 machine_mode mode;
5237 int unsignedp, reversep, volatilep = 0;
5238 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5239 &unsignedp, &reversep, &volatilep);
5240 if (bitpos % BITS_PER_UNIT)
5241 {
5242 iscc->bad_arg_alignment = true;
5243 return true;
5244 }
5245 }
5246 }
5247
5248 return false;
5249 }
5250
5251 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5252 its attributes; return true otherwise. NODE is the cgraph node of the current
5253 function. */
5254
5255 static bool
5256 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5257 {
5258 if (!node->can_be_local_p ())
5259 {
5260 if (dump_file)
5261 fprintf (dump_file, "Function not local to this compilation unit.\n");
5262 return false;
5263 }
5264
5265 if (!node->local.can_change_signature)
5266 {
5267 if (dump_file)
5268 fprintf (dump_file, "Function can not change signature.\n");
5269 return false;
5270 }
5271
5272 if (!tree_versionable_function_p (node->decl))
5273 {
5274 if (dump_file)
5275 fprintf (dump_file, "Function is not versionable.\n");
5276 return false;
5277 }
5278
5279 if (!opt_for_fn (node->decl, optimize)
5280 || !opt_for_fn (node->decl, flag_ipa_sra))
5281 {
5282 if (dump_file)
5283 fprintf (dump_file, "Function not optimized.\n");
5284 return false;
5285 }
5286
5287 if (DECL_VIRTUAL_P (current_function_decl))
5288 {
5289 if (dump_file)
5290 fprintf (dump_file, "Function is a virtual method.\n");
5291 return false;
5292 }
5293
5294 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5295 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5296 {
5297 if (dump_file)
5298 fprintf (dump_file, "Function too big to be made truly local.\n");
5299 return false;
5300 }
5301
5302 if (cfun->stdarg)
5303 {
5304 if (dump_file)
5305 fprintf (dump_file, "Function uses stdarg. \n");
5306 return false;
5307 }
5308
5309 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5310 return false;
5311
5312 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5313 {
5314 if (dump_file)
5315 fprintf (dump_file, "Always inline function will be inlined "
5316 "anyway. \n");
5317 return false;
5318 }
5319
5320 struct ipa_sra_check_caller_data iscc;
5321 memset (&iscc, 0, sizeof(iscc));
5322 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5323 if (!iscc.has_callers)
5324 {
5325 if (dump_file)
5326 fprintf (dump_file,
5327 "Function has no callers in this compilation unit.\n");
5328 return false;
5329 }
5330
5331 if (iscc.bad_arg_alignment)
5332 {
5333 if (dump_file)
5334 fprintf (dump_file,
5335 "A function call has an argument with non-unit alignment.\n");
5336 return false;
5337 }
5338
5339 if (iscc.has_thunk)
5340 {
5341 if (dump_file)
5342 fprintf (dump_file,
5343 "A has thunk.\n");
5344 return false;
5345 }
5346
5347 return true;
5348 }
5349
5350 /* Perform early interprocedural SRA. */
5351
5352 static unsigned int
5353 ipa_early_sra (void)
5354 {
5355 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5356 ipa_parm_adjustment_vec adjustments;
5357 int ret = 0;
5358
5359 if (!ipa_sra_preliminary_function_checks (node))
5360 return 0;
5361
5362 sra_initialize ();
5363 sra_mode = SRA_MODE_EARLY_IPA;
5364
5365 if (!find_param_candidates ())
5366 {
5367 if (dump_file)
5368 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5369 goto simple_out;
5370 }
5371
5372 if (node->call_for_symbol_and_aliases
5373 (some_callers_have_mismatched_arguments_p, NULL, true))
5374 {
5375 if (dump_file)
5376 fprintf (dump_file, "There are callers with insufficient number of "
5377 "arguments or arguments with type mismatches.\n");
5378 goto simple_out;
5379 }
5380
5381 if (node->call_for_symbol_and_aliases
5382 (some_callers_have_no_vuse_p, NULL, true))
5383 {
5384 if (dump_file)
5385 fprintf (dump_file, "There are callers with no VUSE attached "
5386 "to a call stmt.\n");
5387 goto simple_out;
5388 }
5389
5390 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5391 func_param_count
5392 * last_basic_block_for_fn (cfun));
5393 final_bbs = BITMAP_ALLOC (NULL);
5394
5395 scan_function ();
5396 if (encountered_apply_args)
5397 {
5398 if (dump_file)
5399 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5400 goto out;
5401 }
5402
5403 if (encountered_unchangable_recursive_call)
5404 {
5405 if (dump_file)
5406 fprintf (dump_file, "Function calls itself with insufficient "
5407 "number of arguments.\n");
5408 goto out;
5409 }
5410
5411 adjustments = analyze_all_param_acesses ();
5412 if (!adjustments.exists ())
5413 goto out;
5414 if (dump_file)
5415 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5416
5417 if (modify_function (node, adjustments))
5418 ret = TODO_update_ssa | TODO_cleanup_cfg;
5419 else
5420 ret = TODO_update_ssa;
5421 adjustments.release ();
5422
5423 statistics_counter_event (cfun, "Unused parameters deleted",
5424 sra_stats.deleted_unused_parameters);
5425 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5426 sra_stats.scalar_by_ref_to_by_val);
5427 statistics_counter_event (cfun, "Aggregate parameters broken up",
5428 sra_stats.aggregate_params_reduced);
5429 statistics_counter_event (cfun, "Aggregate parameter components created",
5430 sra_stats.param_reductions_created);
5431
5432 out:
5433 BITMAP_FREE (final_bbs);
5434 free (bb_dereferences);
5435 simple_out:
5436 sra_deinitialize ();
5437 return ret;
5438 }
5439
5440 namespace {
5441
5442 const pass_data pass_data_early_ipa_sra =
5443 {
5444 GIMPLE_PASS, /* type */
5445 "eipa_sra", /* name */
5446 OPTGROUP_NONE, /* optinfo_flags */
5447 TV_IPA_SRA, /* tv_id */
5448 0, /* properties_required */
5449 0, /* properties_provided */
5450 0, /* properties_destroyed */
5451 0, /* todo_flags_start */
5452 TODO_dump_symtab, /* todo_flags_finish */
5453 };
5454
5455 class pass_early_ipa_sra : public gimple_opt_pass
5456 {
5457 public:
5458 pass_early_ipa_sra (gcc::context *ctxt)
5459 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5460 {}
5461
5462 /* opt_pass methods: */
5463 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5464 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5465
5466 }; // class pass_early_ipa_sra
5467
5468 } // anon namespace
5469
5470 gimple_opt_pass *
5471 make_pass_early_ipa_sra (gcc::context *ctxt)
5472 {
5473 return new pass_early_ipa_sra (ctxt);
5474 }