gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2020 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
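/* Illustrative example (not from the original sources): given

     struct S { int i; float f; } s;
     s.i = 123;
     s.f = 4.5f;
     use (s.i + 1);

   intraprocedural SRA can replace the scalar parts of S with independent
   variables, conceptually rewriting the function into

     int s$i;
     float s$f;
     s$i = 123;
     s$f = 4.5f;
     use (s$i + 1);

   after which the aggregate s itself may become dead and be removed.  */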
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "dbgcnt.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
102
103
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
106 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
107 SRA_MODE_INTRA }; /* late intraprocedural SRA */
108
109 /* Global variable describing which aggregate reduction we are performing at
110 the moment. */
111 static enum sra_mode sra_mode;
112
113 struct assign_link;
114
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
120
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
126
127 Note that accesses to parts of vector and complex number types are always
128 represented by an access to the whole complex number or vector. It is a
129 duty of the modifying functions to replace them appropriately. */
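/* Illustrative example (not from the original sources): for

     struct In { int a; int b; };
     struct Out { struct In in; int c; } x;

   accesses to x.in.a, x.in and x.c would, assuming 32-bit ints, yield group
   representatives whose tree (built via first_child/next_sibling) looks
   roughly like

     x.in    (offset 0,  size 64)
       x.in.a  (offset 0,  size 32)
     x.c     (offset 64, size 32)

   where children are sorted by offset and lie entirely within their
   parent.  */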
130
131 struct access
132 {
133 /* Values returned by `get_ref_base_and_extent' for each component reference.
134 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
135 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138 tree base;
139
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
142 testcase. */
143 tree expr;
144 /* Type. */
145 tree type;
146
147 /* The statement this access belongs to. */
148 gimple *stmt;
149
150 /* Next group representative for this aggregate. */
151 struct access *next_grp;
152
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access *group_representative;
156
157 /* After access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access *parent;
160
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
164
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. */
167 struct access *next_sibling;
168
169 /* Pointers to the first and last element in the linked list of assign
170 links for propagation from RHS to LHS. */
171 struct assign_link *first_rhs_link, *last_rhs_link;
172
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link *first_lhs_link, *last_lhs_link;
176
177 /* Pointer to the next access in the work queues. */
178 struct access *next_rhs_queued, *next_lhs_queued;
179
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl;
184
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
187
188 /* Is this particular access a write access? */
189 unsigned write : 1;
190
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued : 1;
193
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued : 1;
196
197 /* Does this group contain a write access? This flag is propagated down the
198 access tree. */
199 unsigned grp_write : 1;
200
201 /* Does this group contain a read access? This flag is propagated down the
202 access tree. */
203 unsigned grp_read : 1;
204
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read : 1;
208
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write : 1;
212
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read : 1;
216
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write : 1;
220
221 /* In a root of an access tree, true means that the entire tree should be
222 totally scalarized - that all scalar leaves should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
225 replacements. */
226 unsigned grp_total_scalarization : 1;
227
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
230 possible. */
231 unsigned grp_hint : 1;
232
233 /* Is the subtree rooted in this access fully covered by scalar
234 replacements? */
235 unsigned grp_covered : 1;
236
237 /* If set to true, this access and all below it in an access tree must not be
238 scalarized. */
239 unsigned grp_unscalarizable_region : 1;
240
241 /* Whether data have been written to parts of the aggregate covered by this
242 access which is not to be scalarized. This flag is propagated up in the
243 access tree. */
244 unsigned grp_unscalarized_data : 1;
245
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path : 1;
249
250 /* Does this access and/or group contain a write access through a
251 BIT_FIELD_REF? */
252 unsigned grp_partial_lhs : 1;
253
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced : 1;
256
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced : 1;
260
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning : 1;
263 };
264
265 typedef struct access *access_p;
266
267
268 /* Alloc pool for allocating access structures. */
269 static object_allocator<struct access> access_pool ("SRA accesses");
270
271 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
272 are used to propagate subaccesses from rhs to lhs and vice versa as long as
273 they don't conflict with what is already there. In the RHS->LHS direction,
274 we also propagate grp_write flag to lazily mark that the access contains any
275 meaningful data. */
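/* Illustrative example (not from the original sources): for an aggregate
   assignment "dst = src;" where both sides are candidates, one assign_link
   is allocated with LACC describing the write to dst and RACC describing the
   read from src. Propagation across the link copies subaccesses discovered
   on one side to the other, so that e.g. a scalar replacement of a field of
   src can be matched by a corresponding replacement of the same field of
   dst.  */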
276 struct assign_link
277 {
278 struct access *lacc, *racc;
279 struct assign_link *next_rhs, *next_lhs;
280 };
281
282 /* Alloc pool for allocating assign link structures. */
283 static object_allocator<assign_link> assign_link_pool ("SRA links");
284
285 /* Base (tree) -> Vector (vec<access_p> *) map. */
286 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
287
288 /* Hash to limit creation of artificial accesses. */
289 static hash_map<tree, unsigned> *propagation_budget;
290
291 /* Candidate hash table helpers. */
292
293 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
294 {
295 static inline hashval_t hash (const tree_node *);
296 static inline bool equal (const tree_node *, const tree_node *);
297 };
298
299 /* Hash a tree in a uid_decl_map. */
300
301 inline hashval_t
302 uid_decl_hasher::hash (const tree_node *item)
303 {
304 return item->decl_minimal.uid;
305 }
306
307 /* Return true if the DECL_UID in both trees are equal. */
308
309 inline bool
310 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
311 {
312 return (a->decl_minimal.uid == b->decl_minimal.uid);
313 }
314
315 /* Set of candidates. */
316 static bitmap candidate_bitmap;
317 static hash_table<uid_decl_hasher> *candidates;
318
319 /* For a candidate UID, return the candidate's decl. */
320
321 static inline tree
322 candidate (unsigned uid)
323 {
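  /* Look the declaration up by DECL_UID alone: a dummy tree_node with only
     its uid set is enough because uid_decl_hasher above hashes and compares
     solely by uid.  */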
324 tree_node t;
325 t.decl_minimal.uid = uid;
326 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
327 }
328
329 /* Bitmap of candidates which we should try to entirely scalarize away and
330 those which cannot be (because they are and need to be used as a whole). */
331 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
332
333 /* Bitmap of candidates in the constant pool, which cannot be scalarized
334 because this would produce non-constant expressions (e.g. Ada). */
335 static bitmap disqualified_constants;
336
337 /* Obstack for creation of fancy names. */
338 static struct obstack name_obstack;
339
340 /* Heads of linked lists of accesses that need to have their subaccesses
341 propagated to their assignment counterparts. */
342 static struct access *rhs_work_queue_head, *lhs_work_queue_head;
343
344 /* Statistics gathered during the pass. */
347
348 static struct
349 {
350 /* Number of processed aggregates is readily available in
351 analyze_all_variable_accesses and so is not stored here. */
352
353 /* Number of created scalar replacements. */
354 int replacements;
355
356 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
357 expression. */
358 int exprs;
359
360 /* Number of statements created by generate_subtree_copies. */
361 int subtree_copies;
362
363 /* Number of statements created by load_assign_lhs_subreplacements. */
364 int subreplacements;
365
366 /* Number of times sra_modify_assign has deleted a statement. */
367 int deleted;
368
369 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
370 RHS separately due to type conversions or nonexistent matching
371 references. */
372 int separate_lhs_rhs_handling;
373
374 /* Number of parameters that were removed because they were unused. */
375 int deleted_unused_parameters;
376
377 /* Number of scalars passed as parameters by reference that have been
378 converted to be passed by value. */
379 int scalar_by_ref_to_by_val;
380
381 /* Number of aggregate parameters that were replaced by one or more of their
382 components. */
383 int aggregate_params_reduced;
384
385 /* Number of components created when splitting aggregate parameters. */
386 int param_reductions_created;
387 } sra_stats;
388
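/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */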
389 static void
390 dump_access (FILE *f, struct access *access, bool grp)
391 {
392 fprintf (f, "access { ");
393 fprintf (f, "base = (%d)'", DECL_UID (access->base));
394 print_generic_expr (f, access->base);
395 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
396 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
397 fprintf (f, ", expr = ");
398 print_generic_expr (f, access->expr);
399 fprintf (f, ", type = ");
400 print_generic_expr (f, access->type);
401 fprintf (f, ", reverse = %d", access->reverse);
402 if (grp)
403 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
404 "grp_assignment_write = %d, grp_scalar_read = %d, "
405 "grp_scalar_write = %d, grp_total_scalarization = %d, "
406 "grp_hint = %d, grp_covered = %d, "
407 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
408 "grp_same_access_path = %d, grp_partial_lhs = %d, "
409 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
410 access->grp_read, access->grp_write, access->grp_assignment_read,
411 access->grp_assignment_write, access->grp_scalar_read,
412 access->grp_scalar_write, access->grp_total_scalarization,
413 access->grp_hint, access->grp_covered,
414 access->grp_unscalarizable_region, access->grp_unscalarized_data,
415 access->grp_same_access_path, access->grp_partial_lhs,
416 access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
417 else
418 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
419 "grp_partial_lhs = %d}\n",
420 access->write, access->grp_total_scalarization,
421 access->grp_partial_lhs);
422 }
423
424 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
425
426 static void
427 dump_access_tree_1 (FILE *f, struct access *access, int level)
428 {
429 do
430 {
431 int i;
432
433 for (i = 0; i < level; i++)
434 fputs ("* ", f);
435
436 dump_access (f, access, true);
437
438 if (access->first_child)
439 dump_access_tree_1 (f, access->first_child, level + 1);
440
441 access = access->next_sibling;
442 }
443 while (access);
444 }
445
446 /* Dump all access trees for a variable, given the pointer to the first root in
447 ACCESS. */
448
449 static void
450 dump_access_tree (FILE *f, struct access *access)
451 {
452 for (; access; access = access->next_grp)
453 dump_access_tree_1 (f, access, 0);
454 }
455
456 /* Return true iff ACC is non-NULL and has subaccesses. */
457
458 static inline bool
459 access_has_children_p (struct access *acc)
460 {
461 return acc && acc->first_child;
462 }
463
464 /* Return true iff ACC is (partly) covered by at least one replacement. */
465
466 static bool
467 access_has_replacements_p (struct access *acc)
468 {
469 struct access *child;
470 if (acc->grp_to_be_replaced)
471 return true;
472 for (child = acc->first_child; child; child = child->next_sibling)
473 if (access_has_replacements_p (child))
474 return true;
475 return false;
476 }
477
478 /* Return a vector of pointers to accesses for the variable given in BASE or
479 NULL if there is none. */
480
481 static vec<access_p> *
482 get_base_access_vector (tree base)
483 {
484 return base_access_vec->get (base);
485 }
486
487 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
488 in ACCESS. Return NULL if it cannot be found. */
489
490 static struct access *
491 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
492 HOST_WIDE_INT size)
493 {
494 while (access && (access->offset != offset || access->size != size))
495 {
496 struct access *child = access->first_child;
497
498 while (child && (child->offset + child->size <= offset))
499 child = child->next_sibling;
500 access = child;
501 }
502
503 /* Total scalarization does not replace single field structures with their
504 single field but rather creates an access for them underneath. Look for
505 it. */
506 if (access)
507 while (access->first_child
508 && access->first_child->offset == offset
509 && access->first_child->size == size)
510 access = access->first_child;
511
512 return access;
513 }
514
515 /* Return the first group representative for BASE or NULL if none exists. */
516
517 static struct access *
518 get_first_repr_for_decl (tree base)
519 {
520 vec<access_p> *access_vec;
521
522 access_vec = get_base_access_vector (base);
523 if (!access_vec)
524 return NULL;
525
526 return (*access_vec)[0];
527 }
528
529 /* Find an access representative for the variable BASE and given OFFSET and
530 SIZE. Requires that access trees have already been built. Return NULL if
531 it cannot be found. */
532
533 static struct access *
534 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
535 HOST_WIDE_INT size)
536 {
537 struct access *access;
538
539 access = get_first_repr_for_decl (base);
540 while (access && (access->offset + access->size <= offset))
541 access = access->next_grp;
542 if (!access)
543 return NULL;
544
545 return find_access_in_subtree (access, offset, size);
546 }
547
548 /* Add LINK to the linked list of assign links of RACC. */
549
550 static void
551 add_link_to_rhs (struct access *racc, struct assign_link *link)
552 {
553 gcc_assert (link->racc == racc);
554
555 if (!racc->first_rhs_link)
556 {
557 gcc_assert (!racc->last_rhs_link);
558 racc->first_rhs_link = link;
559 }
560 else
561 racc->last_rhs_link->next_rhs = link;
562
563 racc->last_rhs_link = link;
564 link->next_rhs = NULL;
565 }
566
567 /* Add LINK to the linked list of lhs assign links of LACC. */
568
569 static void
570 add_link_to_lhs (struct access *lacc, struct assign_link *link)
571 {
572 gcc_assert (link->lacc == lacc);
573
574 if (!lacc->first_lhs_link)
575 {
576 gcc_assert (!lacc->last_lhs_link);
577 lacc->first_lhs_link = link;
578 }
579 else
580 lacc->last_lhs_link->next_lhs = link;
581
582 lacc->last_lhs_link = link;
583 link->next_lhs = NULL;
584 }
585
586 /* Move all link structures from their linked lists in OLD_ACC to the
587 corresponding linked lists in NEW_ACC. */
588 static void
589 relink_to_new_repr (struct access *new_acc, struct access *old_acc)
590 {
591 if (old_acc->first_rhs_link)
592 {
593
594 if (new_acc->first_rhs_link)
595 {
596 gcc_assert (!new_acc->last_rhs_link->next_rhs);
597 gcc_assert (!old_acc->last_rhs_link
598 || !old_acc->last_rhs_link->next_rhs);
599
600 new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
601 new_acc->last_rhs_link = old_acc->last_rhs_link;
602 }
603 else
604 {
605 gcc_assert (!new_acc->last_rhs_link);
606
607 new_acc->first_rhs_link = old_acc->first_rhs_link;
608 new_acc->last_rhs_link = old_acc->last_rhs_link;
609 }
610 old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
611 }
612 else
613 gcc_assert (!old_acc->last_rhs_link);
614
615 if (old_acc->first_lhs_link)
616 {
617
618 if (new_acc->first_lhs_link)
619 {
620 gcc_assert (!new_acc->last_lhs_link->next_lhs);
621 gcc_assert (!old_acc->last_lhs_link
622 || !old_acc->last_lhs_link->next_lhs);
623
624 new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
625 new_acc->last_lhs_link = old_acc->last_lhs_link;
626 }
627 else
628 {
629 gcc_assert (!new_acc->last_lhs_link);
630
631 new_acc->first_lhs_link = old_acc->first_lhs_link;
632 new_acc->last_lhs_link = old_acc->last_lhs_link;
633 }
634 old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
635 }
636 else
637 gcc_assert (!old_acc->last_lhs_link);
638
639 }
640
641 /* Add ACCESS to the work queue for propagation of subaccesses from RHS to
642 LHS (which is actually a stack). */
643
644 static void
645 add_access_to_rhs_work_queue (struct access *access)
646 {
647 if (access->first_rhs_link && !access->grp_rhs_queued)
648 {
649 gcc_assert (!access->next_rhs_queued);
650 access->next_rhs_queued = rhs_work_queue_head;
651 access->grp_rhs_queued = 1;
652 rhs_work_queue_head = access;
653 }
654 }
655
656 /* Add ACCESS to the work queue for propagation of subaccesses from LHS to
657 RHS (which is actually a stack). */
658
659 static void
660 add_access_to_lhs_work_queue (struct access *access)
661 {
662 if (access->first_lhs_link && !access->grp_lhs_queued)
663 {
664 gcc_assert (!access->next_lhs_queued);
665 access->next_lhs_queued = lhs_work_queue_head;
666 access->grp_lhs_queued = 1;
667 lhs_work_queue_head = access;
668 }
669 }
670
671 /* Pop an access from the work queue for propagating from RHS to LHS, and
672 return it, assuming there is one. */
673
674 static struct access *
675 pop_access_from_rhs_work_queue (void)
676 {
677 struct access *access = rhs_work_queue_head;
678
679 rhs_work_queue_head = access->next_rhs_queued;
680 access->next_rhs_queued = NULL;
681 access->grp_rhs_queued = 0;
682 return access;
683 }
684
685 /* Pop an access from the work queue for propagating from LHS to RHS, and
686 return it, assuming there is one. */
687
688 static struct access *
689 pop_access_from_lhs_work_queue (void)
690 {
691 struct access *access = lhs_work_queue_head;
692
693 lhs_work_queue_head = access->next_lhs_queued;
694 access->next_lhs_queued = NULL;
695 access->grp_lhs_queued = 0;
696 return access;
697 }
698
699 /* Allocate necessary structures. */
700
701 static void
702 sra_initialize (void)
703 {
704 candidate_bitmap = BITMAP_ALLOC (NULL);
705 candidates = new hash_table<uid_decl_hasher>
706 (vec_safe_length (cfun->local_decls) / 2);
707 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
708 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
709 disqualified_constants = BITMAP_ALLOC (NULL);
710 gcc_obstack_init (&name_obstack);
711 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
712 memset (&sra_stats, 0, sizeof (sra_stats));
713 }
714
715 /* Deallocate all general structures. */
716
717 static void
718 sra_deinitialize (void)
719 {
720 BITMAP_FREE (candidate_bitmap);
721 delete candidates;
722 candidates = NULL;
723 BITMAP_FREE (should_scalarize_away_bitmap);
724 BITMAP_FREE (cannot_scalarize_away_bitmap);
725 BITMAP_FREE (disqualified_constants);
726 access_pool.release ();
727 assign_link_pool.release ();
728 obstack_free (&name_obstack, NULL);
729
730 delete base_access_vec;
731 }
732
733 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
734
735 static bool constant_decl_p (tree decl)
736 {
737 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
738 }
739
740 /* Remove DECL from candidates for SRA and write REASON to the dump file if
741 there is one. */
742
743 static void
744 disqualify_candidate (tree decl, const char *reason)
745 {
746 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
747 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
748 if (constant_decl_p (decl))
749 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
750
751 if (dump_file && (dump_flags & TDF_DETAILS))
752 {
753 fprintf (dump_file, "! Disqualifying ");
754 print_generic_expr (dump_file, decl);
755 fprintf (dump_file, " - %s\n", reason);
756 }
757 }
758
759 /* Return true iff the type contains a field or an element which does not allow
760 scalarization. Use VISITED_TYPES to avoid re-checking already checked
761 (sub-)types. */
762
763 static bool
764 type_internals_preclude_sra_p_1 (tree type, const char **msg,
765 hash_set<tree> *visited_types)
766 {
767 tree fld;
768 tree et;
769
770 if (visited_types->contains (type))
771 return false;
772 visited_types->add (type);
773
774 switch (TREE_CODE (type))
775 {
776 case RECORD_TYPE:
777 case UNION_TYPE:
778 case QUAL_UNION_TYPE:
779 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
780 if (TREE_CODE (fld) == FIELD_DECL)
781 {
782 if (TREE_CODE (fld) == FUNCTION_DECL)
783 continue;
784 tree ft = TREE_TYPE (fld);
785
786 if (TREE_THIS_VOLATILE (fld))
787 {
788 *msg = "volatile structure field";
789 return true;
790 }
791 if (!DECL_FIELD_OFFSET (fld))
792 {
793 *msg = "no structure field offset";
794 return true;
795 }
796 if (!DECL_SIZE (fld))
797 {
798 *msg = "zero structure field size";
799 return true;
800 }
801 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
802 {
803 *msg = "structure field offset not fixed";
804 return true;
805 }
806 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
807 {
808 *msg = "structure field size not fixed";
809 return true;
810 }
811 if (!tree_fits_shwi_p (bit_position (fld)))
812 {
813 *msg = "structure field size too big";
814 return true;
815 }
816 if (AGGREGATE_TYPE_P (ft)
817 && int_bit_position (fld) % BITS_PER_UNIT != 0)
818 {
819 *msg = "structure field is bit field";
820 return true;
821 }
822
823 if (AGGREGATE_TYPE_P (ft)
824 && type_internals_preclude_sra_p_1 (ft, msg, visited_types))
825 return true;
826 }
827
828 return false;
829
830 case ARRAY_TYPE:
831 et = TREE_TYPE (type);
832
833 if (TYPE_VOLATILE (et))
834 {
835 *msg = "element type is volatile";
836 return true;
837 }
838
839 if (AGGREGATE_TYPE_P (et)
840 && type_internals_preclude_sra_p_1 (et, msg, visited_types))
841 return true;
842
843 return false;
844
845 default:
846 return false;
847 }
848 }
849
850 /* Return true iff the type contains a field or an element which does not allow
851 scalarization. */
852
853 bool
854 type_internals_preclude_sra_p (tree type, const char **msg)
855 {
856 hash_set<tree> visited_types;
857 return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
858 }
859
860
861 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
862 the three fields. Also add it to the vector of accesses corresponding to
863 the base. Finally, return the new access. */
864
865 static struct access *
866 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
867 {
868 struct access *access = access_pool.allocate ();
869
870 memset (access, 0, sizeof (struct access));
871 access->base = base;
872 access->offset = offset;
873 access->size = size;
874
875 base_access_vec->get_or_insert (base).safe_push (access);
876
877 return access;
878 }
879
880 static bool maybe_add_sra_candidate (tree);
881
882 /* Create and insert access for EXPR. Return created access, or NULL if it is
883 not possible. Also scan for uses of constant pool as we go along and add
884 to candidates. */
885
886 static struct access *
887 create_access (tree expr, gimple *stmt, bool write)
888 {
889 struct access *access;
890 poly_int64 poffset, psize, pmax_size;
891 tree base = expr;
892 bool reverse, unscalarizable_region = false;
893
894 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
895 &reverse);
896
897 /* For constant-pool entries, check we can substitute the constant value. */
898 if (constant_decl_p (base))
899 {
900 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
901 if (expr != base
902 && !is_gimple_reg_type (TREE_TYPE (expr))
903 && dump_file && (dump_flags & TDF_DETAILS))
904 {
905 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
906 and elements of multidimensional arrays (which are
907 multi-element arrays in their own right). */
908 fprintf (dump_file, "Allowing non-reg-type load of part"
909 " of constant-pool entry: ");
910 print_generic_expr (dump_file, expr);
911 }
912 maybe_add_sra_candidate (base);
913 }
914
915 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
916 return NULL;
917
918 HOST_WIDE_INT offset, size, max_size;
919 if (!poffset.is_constant (&offset)
920 || !psize.is_constant (&size)
921 || !pmax_size.is_constant (&max_size))
922 {
923 disqualify_candidate (base, "Encountered a polynomial-sized access.");
924 return NULL;
925 }
926
927 if (size != max_size)
928 {
929 size = max_size;
930 unscalarizable_region = true;
931 }
932 if (size == 0)
933 return NULL;
934 if (size < 0)
935 {
936 disqualify_candidate (base, "Encountered an unconstrained access.");
937 return NULL;
938 }
939
940 access = create_access_1 (base, offset, size);
941 access->expr = expr;
942 access->type = TREE_TYPE (expr);
943 access->write = write;
944 access->grp_unscalarizable_region = unscalarizable_region;
945 access->stmt = stmt;
946 access->reverse = reverse;
947
948 return access;
949 }
950
951
952 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
953 ARRAY_TYPE with fields that are either of gimple register types (excluding
954 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
955 we are considering a decl from constant pool. If it is false, char arrays
956 will be refused. */
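/* For illustration (not from the original sources): a type such as
   struct { int i; float f[4]; } is scalarizable, whereas a type containing a
   bit-field member, e.g. struct { int flag : 1; }, or a variable-length
   array is not.  */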
957
958 static bool
959 scalarizable_type_p (tree type, bool const_decl)
960 {
961 if (is_gimple_reg_type (type))
962 return true;
963 if (type_contains_placeholder_p (type))
964 return false;
965
966 bool have_predecessor_field = false;
967 HOST_WIDE_INT prev_pos = 0;
968
969 switch (TREE_CODE (type))
970 {
971 case RECORD_TYPE:
972 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
973 if (TREE_CODE (fld) == FIELD_DECL)
974 {
975 tree ft = TREE_TYPE (fld);
976
977 if (zerop (DECL_SIZE (fld)))
978 continue;
979
980 HOST_WIDE_INT pos = int_bit_position (fld);
981 if (have_predecessor_field
982 && pos <= prev_pos)
983 return false;
984
985 have_predecessor_field = true;
986 prev_pos = pos;
987
988 if (DECL_BIT_FIELD (fld))
989 return false;
990
991 if (!scalarizable_type_p (ft, const_decl))
992 return false;
993 }
994
995 return true;
996
997 case ARRAY_TYPE:
998 {
999 HOST_WIDE_INT min_elem_size;
1000 if (const_decl)
1001 min_elem_size = 0;
1002 else
1003 min_elem_size = BITS_PER_UNIT;
1004
1005 if (TYPE_DOMAIN (type) == NULL_TREE
1006 || !tree_fits_shwi_p (TYPE_SIZE (type))
1007 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1008 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1009 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1010 return false;
1011 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1012 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1013 /* Zero-element array, should not prevent scalarization. */
1014 ;
1015 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1016 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1017 /* Variable-length array, do not allow scalarization. */
1018 return false;
1019
1020 tree elem = TREE_TYPE (type);
1021 if (!scalarizable_type_p (elem, const_decl))
1022 return false;
1023 return true;
1024 }
1025 default:
1026 return false;
1027 }
1028 }
1029
1030 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1031
1032 static inline bool
1033 contains_view_convert_expr_p (const_tree ref)
1034 {
1035 while (handled_component_p (ref))
1036 {
1037 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1038 return true;
1039 ref = TREE_OPERAND (ref, 0);
1040 }
1041
1042 return false;
1043 }
1044
1045 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
1046 bit-field field declaration. If TYPE_CHANGING_P is non-NULL, the bool
1047 it points to will be set if REF contains any of the above or a MEM_REF
1048 expression that effectively performs type conversion. */
1049
1050 static bool
1051 contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
1052 {
1053 while (handled_component_p (ref))
1054 {
1055 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1056 || (TREE_CODE (ref) == COMPONENT_REF
1057 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1058 {
1059 if (type_changing_p)
1060 *type_changing_p = true;
1061 return true;
1062 }
1063 ref = TREE_OPERAND (ref, 0);
1064 }
1065
1066 if (!type_changing_p
1067 || TREE_CODE (ref) != MEM_REF
1068 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1069 return false;
1070
1071 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1072 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1073 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1074 *type_changing_p = true;
1075
1076 return false;
1077 }
1078
1079 /* Search the given tree for a declaration by skipping handled components and
1080 exclude it from the candidates. */
1081
1082 static void
1083 disqualify_base_of_expr (tree t, const char *reason)
1084 {
1085 t = get_base_address (t);
1086 if (t && DECL_P (t))
1087 disqualify_candidate (t, reason);
1088 }
1089
1090 /* Scan expression EXPR and create access structures for all accesses to
1091 candidates for scalarization. Return the created access or NULL if none is
1092 created. */
1093
1094 static struct access *
1095 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1096 {
1097 struct access *ret = NULL;
1098 bool partial_ref;
1099
1100 if (TREE_CODE (expr) == BIT_FIELD_REF
1101 || TREE_CODE (expr) == IMAGPART_EXPR
1102 || TREE_CODE (expr) == REALPART_EXPR)
1103 {
1104 expr = TREE_OPERAND (expr, 0);
1105 partial_ref = true;
1106 }
1107 else
1108 partial_ref = false;
1109
1110 if (storage_order_barrier_p (expr))
1111 {
1112 disqualify_base_of_expr (expr, "storage order barrier.");
1113 return NULL;
1114 }
1115
1116 /* We need to dive through V_C_Es in order to get the size of their operand
1117 and not the result type. Ada produces such statements. We are also
1118 capable of handling the topmost V_C_E but not any of those buried in other
1119 handled components. */
1120 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1121 expr = TREE_OPERAND (expr, 0);
1122
1123 if (contains_view_convert_expr_p (expr))
1124 {
1125 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1126 "component.");
1127 return NULL;
1128 }
1129 if (TREE_THIS_VOLATILE (expr))
1130 {
1131 disqualify_base_of_expr (expr, "part of a volatile reference.");
1132 return NULL;
1133 }
1134
1135 switch (TREE_CODE (expr))
1136 {
1137 case MEM_REF:
1138 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
1139 return NULL;
1140 /* fall through */
1141 case VAR_DECL:
1142 case PARM_DECL:
1143 case RESULT_DECL:
1144 case COMPONENT_REF:
1145 case ARRAY_REF:
1146 case ARRAY_RANGE_REF:
1147 ret = create_access (expr, stmt, write);
1148 break;
1149
1150 default:
1151 break;
1152 }
1153
1154 if (write && partial_ref && ret)
1155 ret->grp_partial_lhs = 1;
1156
1157 return ret;
1158 }
1159
1160 /* Scan expression EXPR and create access structures for all accesses to
1161 candidates for scalarization. Return true if any access has been inserted.
1162 STMT must be the statement from which the expression is taken, WRITE must be
1163 true if the expression is a store and false otherwise. */
1164
1165 static bool
1166 build_access_from_expr (tree expr, gimple *stmt, bool write)
1167 {
1168 struct access *access;
1169
1170 access = build_access_from_expr_1 (expr, stmt, write);
1171 if (access)
1172 {
1173 /* This means the aggregate is accessed as a whole in a way other than an
1174 assign statement and thus cannot be removed even if we had a scalar
1175 replacement for everything. */
1176 if (cannot_scalarize_away_bitmap)
1177 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1178 return true;
1179 }
1180 return false;
1181 }
1182
1183 /* Return the single non-EH successor edge of BB or NULL if there is none or
1184 more than one. */
1185
1186 static edge
1187 single_non_eh_succ (basic_block bb)
1188 {
1189 edge e, res = NULL;
1190 edge_iterator ei;
1191
1192 FOR_EACH_EDGE (e, ei, bb->succs)
1193 if (!(e->flags & EDGE_EH))
1194 {
1195 if (res)
1196 return NULL;
1197 res = e;
1198 }
1199
1200 return res;
1201 }
1202
1203 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1204 there is no alternative spot where to put statements SRA might need to
1205 generate after it. The spot we are looking for is an edge leading to a
1206 single non-EH successor, if it exists and is indeed single. RHS may be
1207 NULL, in that case ignore it. */
1208
1209 static bool
1210 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1211 {
1212 if (stmt_ends_bb_p (stmt))
1213 {
1214 if (single_non_eh_succ (gimple_bb (stmt)))
1215 return false;
1216
1217 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1218 if (rhs)
1219 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1220 return true;
1221 }
1222 return false;
1223 }
1224
1225 /* Return true if the nature of BASE is such that it contains data even if
1226 there is no write to it in the function. */
1227
1228 static bool
1229 comes_initialized_p (tree base)
1230 {
1231 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1232 }
1233
1234 /* Scan expressions occurring in STMT, create access structures for all accesses
1235 to candidates for scalarization and remove those candidates which occur in
1236 statements or expressions that prevent them from being split apart. Return
1237 true if any access has been inserted. */
1238
1239 static bool
1240 build_accesses_from_assign (gimple *stmt)
1241 {
1242 tree lhs, rhs;
1243 struct access *lacc, *racc;
1244
1245 if (!gimple_assign_single_p (stmt)
1246 /* Scope clobbers don't influence scalarization. */
1247 || gimple_clobber_p (stmt))
1248 return false;
1249
1250 lhs = gimple_assign_lhs (stmt);
1251 rhs = gimple_assign_rhs1 (stmt);
1252
1253 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1254 return false;
1255
1256 racc = build_access_from_expr_1 (rhs, stmt, false);
1257 lacc = build_access_from_expr_1 (lhs, stmt, true);
1258
1259 if (lacc)
1260 {
1261 lacc->grp_assignment_write = 1;
1262 if (storage_order_barrier_p (rhs))
1263 lacc->grp_unscalarizable_region = 1;
1264
1265 if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
1266 {
1267 bool type_changing_p = false;
1268 contains_vce_or_bfcref_p (lhs, &type_changing_p);
1269 if (type_changing_p)
1270 bitmap_set_bit (cannot_scalarize_away_bitmap,
1271 DECL_UID (lacc->base));
1272 }
1273 }
1274
1275 if (racc)
1276 {
1277 racc->grp_assignment_read = 1;
1278 if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
1279 {
1280 bool type_changing_p = false;
1281 contains_vce_or_bfcref_p (rhs, &type_changing_p);
1282
1283 if (type_changing_p || gimple_has_volatile_ops (stmt))
1284 bitmap_set_bit (cannot_scalarize_away_bitmap,
1285 DECL_UID (racc->base));
1286 else
1287 bitmap_set_bit (should_scalarize_away_bitmap,
1288 DECL_UID (racc->base));
1289 }
1290 if (storage_order_barrier_p (lhs))
1291 racc->grp_unscalarizable_region = 1;
1292 }
1293
1294 if (lacc && racc
1295 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1296 && !lacc->grp_unscalarizable_region
1297 && !racc->grp_unscalarizable_region
1298 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1299 && lacc->size == racc->size
1300 && useless_type_conversion_p (lacc->type, racc->type))
1301 {
1302 struct assign_link *link;
1303
1304 link = assign_link_pool.allocate ();
1305 memset (link, 0, sizeof (struct assign_link));
1306
1307 link->lacc = lacc;
1308 link->racc = racc;
1309 add_link_to_rhs (racc, link);
1310 add_link_to_lhs (lacc, link);
1311 add_access_to_rhs_work_queue (racc);
1312 add_access_to_lhs_work_queue (lacc);
1313
1314 /* Let's delay marking the areas as written until propagation of accesses
1315 across link, unless the nature of rhs tells us that its data comes
1316 from elsewhere. */
1317 if (!comes_initialized_p (racc->base))
1318 lacc->write = false;
1319 }
1320
1321 return lacc || racc;
1322 }
1323
1324 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1325 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1326
1327 static bool
1328 asm_visit_addr (gimple *, tree op, tree, void *)
1329 {
1330 op = get_base_address (op);
1331 if (op
1332 && DECL_P (op))
1333 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1334
1335 return false;
1336 }
1337
1338 /* Scan function and look for interesting expressions and create access
1339 structures for them. Return true iff any access is created. */
1340
1341 static bool
1342 scan_function (void)
1343 {
1344 basic_block bb;
1345 bool ret = false;
1346
1347 FOR_EACH_BB_FN (bb, cfun)
1348 {
1349 gimple_stmt_iterator gsi;
1350 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1351 {
1352 gimple *stmt = gsi_stmt (gsi);
1353 tree t;
1354 unsigned i;
1355
1356 switch (gimple_code (stmt))
1357 {
1358 case GIMPLE_RETURN:
1359 t = gimple_return_retval (as_a <greturn *> (stmt));
1360 if (t != NULL_TREE)
1361 ret |= build_access_from_expr (t, stmt, false);
1362 break;
1363
1364 case GIMPLE_ASSIGN:
1365 ret |= build_accesses_from_assign (stmt);
1366 break;
1367
1368 case GIMPLE_CALL:
1369 for (i = 0; i < gimple_call_num_args (stmt); i++)
1370 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1371 stmt, false);
1372
1373 t = gimple_call_lhs (stmt);
1374 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1375 ret |= build_access_from_expr (t, stmt, true);
1376 break;
1377
1378 case GIMPLE_ASM:
1379 {
1380 gasm *asm_stmt = as_a <gasm *> (stmt);
1381 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1382 asm_visit_addr);
1383 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1384 {
1385 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1386 ret |= build_access_from_expr (t, asm_stmt, false);
1387 }
1388 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1389 {
1390 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1391 ret |= build_access_from_expr (t, asm_stmt, true);
1392 }
1393 }
1394 break;
1395
1396 default:
1397 break;
1398 }
1399 }
1400 }
1401
1402 return ret;
1403 }
1404
1405 /* qsort comparison function for pointers to accesses. An access is considered
1406 smaller than another if it has a smaller offset, or if the offsets are the
1407 same but its size is bigger. */
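/* Illustrative ordering (not from the original sources): with 32-bit ints,
   accesses with (offset, size) of (0, 64) for a struct, (0, 32) for an int
   and (32, 32) for a float sort as: the struct first (same offset, bigger
   size), then the int, then the float.  */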
1408
1409 static int
1410 compare_access_positions (const void *a, const void *b)
1411 {
1412 const access_p *fp1 = (const access_p *) a;
1413 const access_p *fp2 = (const access_p *) b;
1414 const access_p f1 = *fp1;
1415 const access_p f2 = *fp2;
1416
1417 if (f1->offset != f2->offset)
1418 return f1->offset < f2->offset ? -1 : 1;
1419
1420 if (f1->size == f2->size)
1421 {
1422 if (f1->type == f2->type)
1423 return 0;
1424 /* Put any non-aggregate type before any aggregate type. */
1425 else if (!is_gimple_reg_type (f1->type)
1426 && is_gimple_reg_type (f2->type))
1427 return 1;
1428 else if (is_gimple_reg_type (f1->type)
1429 && !is_gimple_reg_type (f2->type))
1430 return -1;
1431 /* Put any complex or vector type before any other scalar type. */
1432 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1433 && TREE_CODE (f1->type) != VECTOR_TYPE
1434 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1435 || TREE_CODE (f2->type) == VECTOR_TYPE))
1436 return 1;
1437 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1438 || TREE_CODE (f1->type) == VECTOR_TYPE)
1439 && TREE_CODE (f2->type) != COMPLEX_TYPE
1440 && TREE_CODE (f2->type) != VECTOR_TYPE)
1441 return -1;
1442 /* Put any integral type before any non-integral type. When splicing, we
1443 make sure that those with insufficient precision and occupying the
1444 same space are not scalarized. */
1445 else if (INTEGRAL_TYPE_P (f1->type)
1446 && !INTEGRAL_TYPE_P (f2->type))
1447 return -1;
1448 else if (!INTEGRAL_TYPE_P (f1->type)
1449 && INTEGRAL_TYPE_P (f2->type))
1450 return 1;
1451 /* Put the integral type with the bigger precision first. */
1452 else if (INTEGRAL_TYPE_P (f1->type)
1453 && INTEGRAL_TYPE_P (f2->type)
1454 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1455 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1456 /* Stabilize the sort. */
1457 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1458 }
1459
1460 /* We want the bigger accesses first, thus the opposite operator in the next
1461 line: */
1462 return f1->size > f2->size ? -1 : 1;
1463 }
1464
1465
1466 /* Append a name of the declaration to the name obstack. A helper function for
1467 make_fancy_name. */
1468
1469 static void
1470 make_fancy_decl_name (tree decl)
1471 {
1472 char buffer[32];
1473
1474 tree name = DECL_NAME (decl);
1475 if (name)
1476 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1477 IDENTIFIER_LENGTH (name));
1478 else
1479 {
1480 sprintf (buffer, "D%u", DECL_UID (decl));
1481 obstack_grow (&name_obstack, buffer, strlen (buffer));
1482 }
1483 }
1484
1485 /* Helper for make_fancy_name. */
1486
1487 static void
1488 make_fancy_name_1 (tree expr)
1489 {
1490 char buffer[32];
1491 tree index;
1492
1493 if (DECL_P (expr))
1494 {
1495 make_fancy_decl_name (expr);
1496 return;
1497 }
1498
1499 switch (TREE_CODE (expr))
1500 {
1501 case COMPONENT_REF:
1502 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1503 obstack_1grow (&name_obstack, '$');
1504 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1505 break;
1506
1507 case ARRAY_REF:
1508 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1509 obstack_1grow (&name_obstack, '$');
1510 /* Arrays with only one element may not have a constant as their
1511 index. */
1512 index = TREE_OPERAND (expr, 1);
1513 if (TREE_CODE (index) != INTEGER_CST)
1514 break;
1515 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1516 obstack_grow (&name_obstack, buffer, strlen (buffer));
1517 break;
1518
1519 case ADDR_EXPR:
1520 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1521 break;
1522
1523 case MEM_REF:
1524 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1525 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1526 {
1527 obstack_1grow (&name_obstack, '$');
1528 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1529 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1530 obstack_grow (&name_obstack, buffer, strlen (buffer));
1531 }
1532 break;
1533
1534 case BIT_FIELD_REF:
1535 case REALPART_EXPR:
1536 case IMAGPART_EXPR:
1537 gcc_unreachable (); /* we treat these as scalars. */
1538 break;
1539 default:
1540 break;
1541 }
1542 }
1543
1544 /* Create a human readable name for a replacement variable based on EXPR. */
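/* For illustration (not from the original sources): an expression such as
   rec.data[3].len yields a name like "rec$data$3$len"; a non-zero MEM_REF
   offset contributes a "$<offset>" component of its own.  */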
1545
1546 static char *
1547 make_fancy_name (tree expr)
1548 {
1549 make_fancy_name_1 (expr);
1550 obstack_1grow (&name_obstack, '\0');
1551 return XOBFINISH (&name_obstack, char *);
1552 }
1553
1554 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1555 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1556 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1557 be non-NULL and is used to insert new statements either before or below
1558 the current one as specified by INSERT_AFTER. This function is not capable
1559 of handling bitfields. */
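/* For illustration (not from the original sources): for BASE being a
   declaration "a" of aggregate type, OFFSET 64 (bits) and EXP_TYPE int, this
   builds a MEM_REF roughly equivalent to *(int *) ((char *) &a + 8), with
   alignment and address-space information carried over from BASE.  */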
1560
1561 tree
1562 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1563 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1564 bool insert_after)
1565 {
1566 tree prev_base = base;
1567 tree off;
1568 tree mem_ref;
1569 poly_int64 base_offset;
1570 unsigned HOST_WIDE_INT misalign;
1571 unsigned int align;
1572
1573 /* Preserve address-space information. */
1574 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1575 if (as != TYPE_ADDR_SPACE (exp_type))
1576 exp_type = build_qualified_type (exp_type,
1577 TYPE_QUALS (exp_type)
1578 | ENCODE_QUAL_ADDR_SPACE (as));
1579
1580 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1581 get_object_alignment_1 (base, &align, &misalign);
1582 base = get_addr_base_and_unit_offset (base, &base_offset);
1583
1584 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1585 offset such as array[var_index]. */
1586 if (!base)
1587 {
1588 gassign *stmt;
1589 tree tmp, addr;
1590
1591 gcc_checking_assert (gsi);
1592 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1593 addr = build_fold_addr_expr (unshare_expr (prev_base));
1594 STRIP_USELESS_TYPE_CONVERSION (addr);
1595 stmt = gimple_build_assign (tmp, addr);
1596 gimple_set_location (stmt, loc);
1597 if (insert_after)
1598 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1599 else
1600 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1601
1602 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1603 base = tmp;
1604 }
1605 else if (TREE_CODE (base) == MEM_REF)
1606 {
1607 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1608 base_offset + byte_offset);
1609 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1610 base = unshare_expr (TREE_OPERAND (base, 0));
1611 }
1612 else
1613 {
1614 off = build_int_cst (reference_alias_ptr_type (prev_base),
1615 base_offset + byte_offset);
1616 base = build_fold_addr_expr (unshare_expr (base));
1617 }
1618
1619 unsigned int align_bound = known_alignment (misalign + offset);
1620 if (align_bound != 0)
1621 align = MIN (align, align_bound);
1622 if (align != TYPE_ALIGN (exp_type))
1623 exp_type = build_aligned_type (exp_type, align);
1624
1625 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1626 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1627 if (TREE_THIS_VOLATILE (prev_base))
1628 TREE_THIS_VOLATILE (mem_ref) = 1;
1629 if (TREE_SIDE_EFFECTS (prev_base))
1630 TREE_SIDE_EFFECTS (mem_ref) = 1;
1631 return mem_ref;
1632 }
1633
1634 /* Construct and return a memory reference that is equal to a portion of
1635 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1636
1637 static tree
1638 build_reconstructed_reference (location_t, tree base, struct access *model)
1639 {
1640 tree expr = model->expr, prev_expr = NULL;
1641 while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
1642 {
1643 if (!handled_component_p (expr))
1644 return NULL_TREE;
1645 prev_expr = expr;
1646 expr = TREE_OPERAND (expr, 0);
1647 }
1648
1649 /* Guard against broken VIEW_CONVERT_EXPRs... */
1650 if (!prev_expr)
1651 return NULL_TREE;
1652
1653 TREE_OPERAND (prev_expr, 0) = base;
1654 tree ref = unshare_expr (model->expr);
1655 TREE_OPERAND (prev_expr, 0) = expr;
1656 return ref;
1657 }
1658
1659 /* Construct a memory reference to a part of an aggregate BASE at the given
1660 OFFSET and of the same type as MODEL. In case this is a reference to a
1661 bit-field, the function will replicate the last component_ref of model's
1662 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1663 build_ref_for_offset. */
1664
1665 static tree
1666 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1667 struct access *model, gimple_stmt_iterator *gsi,
1668 bool insert_after)
1669 {
1670 if (TREE_CODE (model->expr) == COMPONENT_REF
1671 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1672 {
1673 /* This access represents a bit-field. */
1674 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1675
1676 offset -= int_bit_position (fld);
1677 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1678 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1679 gsi, insert_after);
1680 /* The flag will be set on the record type. */
1681 REF_REVERSE_STORAGE_ORDER (t) = 0;
1682 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1683 NULL_TREE);
1684 }
1685 else
1686 {
1687 tree res;
1688 if (model->grp_same_access_path
1689 && !TREE_THIS_VOLATILE (base)
1690 && (TYPE_ADDR_SPACE (TREE_TYPE (base))
1691 == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
1692 && offset <= model->offset
1693 /* build_reconstructed_reference can still fail if we have already
1694 massaged BASE because of another type incompatibility. */
1695 && (res = build_reconstructed_reference (loc, base, model)))
1696 return res;
1697 else
1698 return build_ref_for_offset (loc, base, offset, model->reverse,
1699 model->type, gsi, insert_after);
1700 }
1701 }
1702
1703 /* Attempt to build a memory reference that we could put into a gimple
1704 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1705 create statements and returns NULL instead. This function also ignores
1706 alignment issues and so its results should never end up in non-debug
1707 statements. */
1708
1709 static tree
1710 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1711 struct access *model)
1712 {
1713 poly_int64 base_offset;
1714 tree off;
1715
1716 if (TREE_CODE (model->expr) == COMPONENT_REF
1717 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1718 return NULL_TREE;
1719
1720 base = get_addr_base_and_unit_offset (base, &base_offset);
1721 if (!base)
1722 return NULL_TREE;
1723 if (TREE_CODE (base) == MEM_REF)
1724 {
1725 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1726 base_offset + offset / BITS_PER_UNIT);
1727 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1728 base = unshare_expr (TREE_OPERAND (base, 0));
1729 }
1730 else
1731 {
1732 off = build_int_cst (reference_alias_ptr_type (base),
1733 base_offset + offset / BITS_PER_UNIT);
1734 base = build_fold_addr_expr (unshare_expr (base));
1735 }
1736
1737 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1738 }
1739
1740 /* Construct a memory reference consisting of component_refs and array_refs to
1741 a part of an aggregate *RES (which is of type TYPE). The requested part
1742 should have type EXP_TYPE and be at the given OFFSET. This function might not
1743 succeed; it returns true when it does, and only then does *RES point to
1744 something meaningful. This function should be used only to build expressions
1745 that we might need to present to the user (e.g. in warnings). In all other situations,
1746 build_ref_for_model or build_ref_for_offset should be used instead. */
1747
1748 static bool
1749 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1750 tree exp_type)
1751 {
1752 while (1)
1753 {
1754 tree fld;
1755 tree tr_size, index, minidx;
1756 HOST_WIDE_INT el_size;
1757
1758 if (offset == 0 && exp_type
1759 && types_compatible_p (exp_type, type))
1760 return true;
1761
1762 switch (TREE_CODE (type))
1763 {
1764 case UNION_TYPE:
1765 case QUAL_UNION_TYPE:
1766 case RECORD_TYPE:
1767 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1768 {
1769 HOST_WIDE_INT pos, size;
1770 tree tr_pos, expr, *expr_ptr;
1771
1772 if (TREE_CODE (fld) != FIELD_DECL)
1773 continue;
1774
1775 tr_pos = bit_position (fld);
1776 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1777 continue;
1778 pos = tree_to_uhwi (tr_pos);
1779 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1780 tr_size = DECL_SIZE (fld);
1781 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1782 continue;
1783 size = tree_to_uhwi (tr_size);
1784 if (size == 0)
1785 {
1786 if (pos != offset)
1787 continue;
1788 }
1789 else if (pos > offset || (pos + size) <= offset)
1790 continue;
1791
1792 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1793 NULL_TREE);
1794 expr_ptr = &expr;
1795 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1796 offset - pos, exp_type))
1797 {
1798 *res = expr;
1799 return true;
1800 }
1801 }
1802 return false;
1803
1804 case ARRAY_TYPE:
1805 tr_size = TYPE_SIZE (TREE_TYPE (type));
1806 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1807 return false;
1808 el_size = tree_to_uhwi (tr_size);
1809
1810 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1811 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1812 return false;
1813 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1814 if (!integer_zerop (minidx))
1815 index = int_const_binop (PLUS_EXPR, index, minidx);
1816 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1817 NULL_TREE, NULL_TREE);
1818 offset = offset % el_size;
1819 type = TREE_TYPE (type);
1820 break;
1821
1822 default:
1823 if (offset != 0)
1824 return false;
1825
1826 if (exp_type)
1827 return false;
1828 else
1829 return true;
1830 }
1831 }
1832 }
1833
1834 /* Print a message to the dump file explaining why a variable was rejected. */
1835
1836 static void
1837 reject (tree var, const char *msg)
1838 {
1839 if (dump_file && (dump_flags & TDF_DETAILS))
1840 {
1841 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1842 print_generic_expr (dump_file, var);
1843 fprintf (dump_file, "\n");
1844 }
1845 }
1846
1847 /* Return true if VAR is a candidate for SRA. */
1848
1849 static bool
1850 maybe_add_sra_candidate (tree var)
1851 {
1852 tree type = TREE_TYPE (var);
1853 const char *msg;
1854 tree_node **slot;
1855
1856 if (!AGGREGATE_TYPE_P (type))
1857 {
1858 reject (var, "not aggregate");
1859 return false;
1860 }
1861 /* Allow constant-pool entries that "need to live in memory". */
1862 if (needs_to_live_in_memory (var) && !constant_decl_p (var))
1863 {
1864 reject (var, "needs to live in memory");
1865 return false;
1866 }
1867 if (TREE_THIS_VOLATILE (var))
1868 {
1869 reject (var, "is volatile");
1870 return false;
1871 }
1872 if (!COMPLETE_TYPE_P (type))
1873 {
1874 reject (var, "has incomplete type");
1875 return false;
1876 }
1877 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1878 {
1879 reject (var, "type size not fixed");
1880 return false;
1881 }
1882 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1883 {
1884 reject (var, "type size is zero");
1885 return false;
1886 }
1887 if (type_internals_preclude_sra_p (type, &msg))
1888 {
1889 reject (var, msg);
1890 return false;
1891 }
1892 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1893 we also want to schedule it rather late. Thus we ignore it in
1894 the early pass. */
1895 (sra_mode == SRA_MODE_EARLY_INTRA
1896 && is_va_list_type (type)))
1897 {
1898 reject (var, "is va_list");
1899 return false;
1900 }
1901
1902 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1903 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1904 *slot = var;
1905
1906 if (dump_file && (dump_flags & TDF_DETAILS))
1907 {
1908 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1909 print_generic_expr (dump_file, var);
1910 fprintf (dump_file, "\n");
1911 }
1912
1913 return true;
1914 }
1915
1916 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1917 those declarations whose type is suitable for scalarization. */
1918
1919 static bool
1920 find_var_candidates (void)
1921 {
1922 tree var, parm;
1923 unsigned int i;
1924 bool ret = false;
1925
1926 for (parm = DECL_ARGUMENTS (current_function_decl);
1927 parm;
1928 parm = DECL_CHAIN (parm))
1929 ret |= maybe_add_sra_candidate (parm);
1930
1931 FOR_EACH_LOCAL_DECL (cfun, i, var)
1932 {
1933 if (!VAR_P (var))
1934 continue;
1935
1936 ret |= maybe_add_sra_candidate (var);
1937 }
1938
1939 return ret;
1940 }
1941
1942 /* Return true if EXPR is a reference chain of COMPONENT_REFs and ARRAY_REFs
1943 ending either with a DECL or a MEM_REF with zero offset. */
1944
1945 static bool
1946 path_comparable_for_same_access (tree expr)
1947 {
1948 while (handled_component_p (expr))
1949 {
1950 if (TREE_CODE (expr) == ARRAY_REF)
1951 {
1952 /* SSA name indices can occur here too when the array is of size one.
1953 But we cannot just re-use array_refs with SSA names elsewhere in
1954 the function, so disallow non-constant indices. TODO: Remove this
1955 limitation after teaching build_reconstructed_reference to replace
1956 the index with the index type lower bound. */
1957 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
1958 return false;
1959 }
1960 expr = TREE_OPERAND (expr, 0);
1961 }
1962
1963 if (TREE_CODE (expr) == MEM_REF)
1964 {
1965 if (!zerop (TREE_OPERAND (expr, 1)))
1966 return false;
1967 }
1968 else
1969 gcc_assert (DECL_P (expr));
1970
1971 return true;
1972 }
1973
1974 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
1975 true if the chain of these handled components are exactly the same as EXP2
1976 and the expression under them is the same DECL or an equivalent MEM_REF.
1977 The reference picked by compare_access_positions must go to EXP1. */
1978
1979 static bool
1980 same_access_path_p (tree exp1, tree exp2)
1981 {
1982 if (TREE_CODE (exp1) != TREE_CODE (exp2))
1983 {
1984 /* Special case single-field structures loaded sometimes as the field
1985 and sometimes as the structure. If the field is of a scalar type,
1986 compare_access_positions will put it into exp1.
1987
1988 TODO: The gimple register type condition can be removed if we teach
1989 compare_access_positions to put inner types first. */
1990 if (is_gimple_reg_type (TREE_TYPE (exp1))
1991 && TREE_CODE (exp1) == COMPONENT_REF
1992 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
1993 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
1994 exp1 = TREE_OPERAND (exp1, 0);
1995 else
1996 return false;
1997 }
1998
1999 if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
2000 return false;
2001
2002 return true;
2003 }
2004
2005 /* Sort all accesses for the given variable, check for partial overlaps and
2006 return NULL if there are any. If there are none, pick a representative for
2007 each combination of offset and size and create a linked list out of them.
2008 Return the pointer to the first representative and make sure it is the first
2009 one in the vector of accesses. */
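/* For example (illustrative only), an 8-byte access at offset 0 and another
   8-byte access at offset 4 overlap only partially, with neither contained
   in the other, so the function gives up and returns NULL for the whole
   variable. */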
2010
2011 static struct access *
2012 sort_and_splice_var_accesses (tree var)
2013 {
2014 int i, j, access_count;
2015 struct access *res, **prev_acc_ptr = &res;
2016 vec<access_p> *access_vec;
2017 bool first = true;
2018 HOST_WIDE_INT low = -1, high = 0;
2019
2020 access_vec = get_base_access_vector (var);
2021 if (!access_vec)
2022 return NULL;
2023 access_count = access_vec->length ();
2024
2025 /* Sort by <OFFSET, SIZE>. */
2026 access_vec->qsort (compare_access_positions);
2027
2028 i = 0;
2029 while (i < access_count)
2030 {
2031 struct access *access = (*access_vec)[i];
2032 bool grp_write = access->write;
2033 bool grp_read = !access->write;
2034 bool grp_scalar_write = access->write
2035 && is_gimple_reg_type (access->type);
2036 bool grp_scalar_read = !access->write
2037 && is_gimple_reg_type (access->type);
2038 bool grp_assignment_read = access->grp_assignment_read;
2039 bool grp_assignment_write = access->grp_assignment_write;
2040 bool multiple_scalar_reads = false;
2041 bool grp_partial_lhs = access->grp_partial_lhs;
2042 bool first_scalar = is_gimple_reg_type (access->type);
2043 bool unscalarizable_region = access->grp_unscalarizable_region;
2044 bool grp_same_access_path = true;
2045 bool bf_non_full_precision
2046 = (INTEGRAL_TYPE_P (access->type)
2047 && TYPE_PRECISION (access->type) != access->size
2048 && TREE_CODE (access->expr) == COMPONENT_REF
2049 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2050
2051 if (first || access->offset >= high)
2052 {
2053 first = false;
2054 low = access->offset;
2055 high = access->offset + access->size;
2056 }
2057 else if (access->offset > low && access->offset + access->size > high)
2058 return NULL;
2059 else
2060 gcc_assert (access->offset >= low
2061 && access->offset + access->size <= high);
2062
2063 grp_same_access_path = path_comparable_for_same_access (access->expr);
2064
2065 j = i + 1;
2066 while (j < access_count)
2067 {
2068 struct access *ac2 = (*access_vec)[j];
2069 if (ac2->offset != access->offset || ac2->size != access->size)
2070 break;
2071 if (ac2->write)
2072 {
2073 grp_write = true;
2074 grp_scalar_write = (grp_scalar_write
2075 || is_gimple_reg_type (ac2->type));
2076 }
2077 else
2078 {
2079 grp_read = true;
2080 if (is_gimple_reg_type (ac2->type))
2081 {
2082 if (grp_scalar_read)
2083 multiple_scalar_reads = true;
2084 else
2085 grp_scalar_read = true;
2086 }
2087 }
2088 grp_assignment_read |= ac2->grp_assignment_read;
2089 grp_assignment_write |= ac2->grp_assignment_write;
2090 grp_partial_lhs |= ac2->grp_partial_lhs;
2091 unscalarizable_region |= ac2->grp_unscalarizable_region;
2092 relink_to_new_repr (access, ac2);
2093
2094 /* If there are both aggregate-type and scalar-type accesses with
2095 this combination of size and offset, the comparison function
2096 should have put the scalars first. */
2097 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2098 /* It also prefers integral types to non-integral. However, when the
2099 precision of the selected type does not span the entire area and
2100 should also be used for a non-integer (e.g. a float), we must not
2101 let that happen. Normally analyze_access_subtree expands the type
2102 to cover the entire area but for bit-fields it doesn't. */
2103 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2104 {
2105 if (dump_file && (dump_flags & TDF_DETAILS))
2106 {
2107 fprintf (dump_file, "Cannot scalarize the following access "
2108 "because insufficient precision integer type was "
2109 "selected.\n ");
2110 dump_access (dump_file, access, false);
2111 }
2112 unscalarizable_region = true;
2113 }
2114
2115 if (grp_same_access_path
2116 && !same_access_path_p (access->expr, ac2->expr))
2117 grp_same_access_path = false;
2118
2119 ac2->group_representative = access;
2120 j++;
2121 }
2122
2123 i = j;
2124
2125 access->group_representative = access;
2126 access->grp_write = grp_write;
2127 access->grp_read = grp_read;
2128 access->grp_scalar_read = grp_scalar_read;
2129 access->grp_scalar_write = grp_scalar_write;
2130 access->grp_assignment_read = grp_assignment_read;
2131 access->grp_assignment_write = grp_assignment_write;
2132 access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
2133 access->grp_partial_lhs = grp_partial_lhs;
2134 access->grp_unscalarizable_region = unscalarizable_region;
2135 access->grp_same_access_path = grp_same_access_path;
2136
2137 *prev_acc_ptr = access;
2138 prev_acc_ptr = &access->next_grp;
2139 }
2140
2141 gcc_assert (res == (*access_vec)[0]);
2142 return res;
2143 }
2144
2145 /* Create a variable for the given ACCESS which determines its type, name and a
2146 few other properties. Return the variable declaration; callers store it in
2147 ACCESS->replacement_decl. REG_TYPE is used when creating a declaration to base a
2148 default-definition SSA name on in order to facilitate an uninitialized
2149 warning. It is used instead of the actual ACCESS type if that is not of a
2150 gimple register type. */
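/* As an illustration (an assumption about typical dumps, not a guarantee),
   the replacement created for an access like s.i of a user variable is an
   artificial scalar whose pretty name is derived from the access expression
   by make_fancy_name, so it tends to show up in dumps as something like
   "s$i". */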
2151
2152 static tree
2153 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2154 {
2155 tree repl;
2156
2157 tree type = access->type;
2158 if (reg_type && !is_gimple_reg_type (type))
2159 type = reg_type;
2160
2161 if (access->grp_to_be_debug_replaced)
2162 {
2163 repl = create_tmp_var_raw (access->type);
2164 DECL_CONTEXT (repl) = current_function_decl;
2165 }
2166 else
2167 /* Drop any special alignment on the type if it's not on the main
2168 variant. This avoids issues with weirdo ABIs like AAPCS. */
2169 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2170 TYPE_QUALS (type)), "SR");
2171 if (access->grp_partial_lhs
2172 && is_gimple_reg_type (type))
2173 DECL_NOT_GIMPLE_REG_P (repl) = 1;
2174
2175 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2176 DECL_ARTIFICIAL (repl) = 1;
2177 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2178
2179 if (DECL_NAME (access->base)
2180 && !DECL_IGNORED_P (access->base)
2181 && !DECL_ARTIFICIAL (access->base))
2182 {
2183 char *pretty_name = make_fancy_name (access->expr);
2184 tree debug_expr = unshare_expr_without_location (access->expr), d;
2185 bool fail = false;
2186
2187 DECL_NAME (repl) = get_identifier (pretty_name);
2188 DECL_NAMELESS (repl) = 1;
2189 obstack_free (&name_obstack, pretty_name);
2190
2191 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2192 as DECL_DEBUG_EXPR isn't considered when looking for still
2193 used SSA_NAMEs and thus they could be freed. All debug info
2194 generation cares about is whether something is constant or variable
2195 and that get_ref_base_and_extent works properly on the
2196 expression. It cannot handle accesses at a non-constant offset
2197 though, so just give up in those cases. */
2198 for (d = debug_expr;
2199 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2200 d = TREE_OPERAND (d, 0))
2201 switch (TREE_CODE (d))
2202 {
2203 case ARRAY_REF:
2204 case ARRAY_RANGE_REF:
2205 if (TREE_OPERAND (d, 1)
2206 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2207 fail = true;
2208 if (TREE_OPERAND (d, 3)
2209 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2210 fail = true;
2211 /* FALLTHRU */
2212 case COMPONENT_REF:
2213 if (TREE_OPERAND (d, 2)
2214 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2215 fail = true;
2216 break;
2217 case MEM_REF:
2218 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2219 fail = true;
2220 else
2221 d = TREE_OPERAND (d, 0);
2222 break;
2223 default:
2224 break;
2225 }
2226 if (!fail)
2227 {
2228 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2229 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2230 }
2231 if (access->grp_no_warning)
2232 TREE_NO_WARNING (repl) = 1;
2233 else
2234 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2235 }
2236 else
2237 TREE_NO_WARNING (repl) = 1;
2238
2239 if (dump_file)
2240 {
2241 if (access->grp_to_be_debug_replaced)
2242 {
2243 fprintf (dump_file, "Created a debug-only replacement for ");
2244 print_generic_expr (dump_file, access->base);
2245 fprintf (dump_file, " offset: %u, size: %u\n",
2246 (unsigned) access->offset, (unsigned) access->size);
2247 }
2248 else
2249 {
2250 fprintf (dump_file, "Created a replacement for ");
2251 print_generic_expr (dump_file, access->base);
2252 fprintf (dump_file, " offset: %u, size: %u: ",
2253 (unsigned) access->offset, (unsigned) access->size);
2254 print_generic_expr (dump_file, repl, TDF_UID);
2255 fprintf (dump_file, "\n");
2256 }
2257 }
2258 sra_stats.replacements++;
2259
2260 return repl;
2261 }
2262
2263 /* Return the scalar replacement of ACCESS, which must exist. */
2264
2265 static inline tree
2266 get_access_replacement (struct access *access)
2267 {
2268 gcc_checking_assert (access->replacement_decl);
2269 return access->replacement_decl;
2270 }
2271
2272
2273 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2274 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2275 to by it is not "within" the root. Return false iff some accesses partially
2276 overlap. */
2277
2278 static bool
2279 build_access_subtree (struct access **access)
2280 {
2281 struct access *root = *access, *last_child = NULL;
2282 HOST_WIDE_INT limit = root->offset + root->size;
2283
2284 *access = (*access)->next_grp;
2285 while (*access && (*access)->offset + (*access)->size <= limit)
2286 {
2287 if (!last_child)
2288 root->first_child = *access;
2289 else
2290 last_child->next_sibling = *access;
2291 last_child = *access;
2292 (*access)->parent = root;
2293 (*access)->grp_write |= root->grp_write;
2294
2295 if (!build_access_subtree (access))
2296 return false;
2297 }
2298
2299 if (*access && (*access)->offset < limit)
2300 return false;
2301
2302 return true;
2303 }
2304
2305 /* Build a tree of access representatives. ACCESS is the pointer to the first
2306 one; others are linked in a list by the next_grp field. Return false iff
2307 some accesses partially overlap. */
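/* For instance (a sketch), representatives sorted as <offset 0, size 64>,
   <0, 32> and <32, 32> produce a single tree whose root is the 64-bit access
   and whose two children are the 32-bit accesses, because both children lie
   entirely within the root. */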
2308
2309 static bool
2310 build_access_trees (struct access *access)
2311 {
2312 while (access)
2313 {
2314 struct access *root = access;
2315
2316 if (!build_access_subtree (&access))
2317 return false;
2318 root->next_grp = access;
2319 }
2320 return true;
2321 }
2322
2323 /* Traverse the access forest where ROOT is the first root and verify that
2324 various important invariants hold true. */
2325
2326 DEBUG_FUNCTION void
2327 verify_sra_access_forest (struct access *root)
2328 {
2329 struct access *access = root;
2330 tree first_base = root->base;
2331 gcc_assert (DECL_P (first_base));
2332 do
2333 {
2334 gcc_assert (access->base == first_base);
2335 if (access->parent)
2336 gcc_assert (access->offset >= access->parent->offset
2337 && access->size <= access->parent->size);
2338 if (access->next_sibling)
2339 gcc_assert (access->next_sibling->offset
2340 >= access->offset + access->size);
2341
2342 poly_int64 poffset, psize, pmax_size;
2343 bool reverse;
2344 tree base = get_ref_base_and_extent (access->expr, &poffset, &psize,
2345 &pmax_size, &reverse);
2346 HOST_WIDE_INT offset, size, max_size;
2347 if (!poffset.is_constant (&offset)
2348 || !psize.is_constant (&size)
2349 || !pmax_size.is_constant (&max_size))
2350 gcc_unreachable ();
2351 gcc_assert (base == first_base);
2352 gcc_assert (offset == access->offset);
2353 gcc_assert (access->grp_unscalarizable_region
2354 || access->grp_total_scalarization
2355 || size == max_size);
2356 gcc_assert (access->grp_unscalarizable_region
2357 || !is_gimple_reg_type (access->type)
2358 || size == access->size);
2359 gcc_assert (reverse == access->reverse);
2360
2361 if (access->first_child)
2362 {
2363 gcc_assert (access->first_child->parent == access);
2364 access = access->first_child;
2365 }
2366 else if (access->next_sibling)
2367 {
2368 gcc_assert (access->next_sibling->parent == access->parent);
2369 access = access->next_sibling;
2370 }
2371 else
2372 {
2373 while (access->parent && !access->next_sibling)
2374 access = access->parent;
2375 if (access->next_sibling)
2376 access = access->next_sibling;
2377 else
2378 {
2379 gcc_assert (access == root);
2380 root = root->next_grp;
2381 access = root;
2382 }
2383 }
2384 }
2385 while (access);
2386 }
2387
2388 /* Verify access forests of all candidates with accesses by calling
2389 verify_sra_access_forest on each of them. */
2390
2391 DEBUG_FUNCTION void
2392 verify_all_sra_access_forests (void)
2393 {
2394 bitmap_iterator bi;
2395 unsigned i;
2396 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2397 {
2398 tree var = candidate (i);
2399 struct access *access = get_first_repr_for_decl (var);
2400 if (access)
2401 {
2402 gcc_assert (access->base == var);
2403 verify_sra_access_forest (access);
2404 }
2405 }
2406 }
2407
2408 /* Return true if EXPR contains some ARRAY_REFs into an array with a variable
2409 bound. */
2410
2411 static bool
2412 expr_with_var_bounded_array_refs_p (tree expr)
2413 {
2414 while (handled_component_p (expr))
2415 {
2416 if (TREE_CODE (expr) == ARRAY_REF
2417 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2418 return true;
2419 expr = TREE_OPERAND (expr, 0);
2420 }
2421 return false;
2422 }
2423
2424 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2425 doing so seems beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2426 is set, we are totally scalarizing the aggregate. Also set all sorts of
2427 access flags appropriately along the way, notably propagating grp_read,
2428 grp_assignment_read, grp_write and grp_assignment_write from PARENT to
2429 ROOT.
2430
2431 Creating a replacement for a scalar access is considered beneficial if its
2432 grp_hint or TOTALLY is set (this means either that there is more than one
2433 direct read access or that we are attempting total scalarization) or
2434 according to the following table:
2435
2436 Access written to through a scalar type (once or more times)
2437 |
2438 | Written to in an assignment statement
2439 | |
2440 | | Access read as scalar _once_
2441 | | |
2442 | | | Read in an assignment statement
2443 | | | |
2444 | | | | Scalarize Comment
2445 -----------------------------------------------------------------------------
2446 0 0 0 0 No access for the scalar
2447 0 0 0 1 No access for the scalar
2448 0 0 1 0 No Single read - won't help
2449 0 0 1 1 No The same case
2450 0 1 0 0 No access for the scalar
2451 0 1 0 1 No access for the scalar
2452 0 1 1 0 Yes s = *g; return s.i;
2453 0 1 1 1 Yes The same case as above
2454 1 0 0 0 No Won't help
2455 1 0 0 1 Yes s.i = 1; *g = s;
2456 1 0 1 0 Yes s.i = 5; g = s.i;
2457 1 0 1 1 Yes The same case as above
2458 1 1 0 0 No Won't help.
2459 1 1 0 1 Yes s.i = 1; *g = s;
2460 1 1 1 0 Yes s = *g; return s.i;
2461 1 1 1 1 Yes Any of the above yeses */
2462
2463 static bool
2464 analyze_access_subtree (struct access *root, struct access *parent,
2465 bool allow_replacements, bool totally)
2466 {
2467 struct access *child;
2468 HOST_WIDE_INT limit = root->offset + root->size;
2469 HOST_WIDE_INT covered_to = root->offset;
2470 bool scalar = is_gimple_reg_type (root->type);
2471 bool hole = false, sth_created = false;
2472
2473 if (parent)
2474 {
2475 if (parent->grp_read)
2476 root->grp_read = 1;
2477 if (parent->grp_assignment_read)
2478 root->grp_assignment_read = 1;
2479 if (parent->grp_write)
2480 root->grp_write = 1;
2481 if (parent->grp_assignment_write)
2482 root->grp_assignment_write = 1;
2483 if (!parent->grp_same_access_path)
2484 root->grp_same_access_path = 0;
2485 }
2486
2487 if (root->grp_unscalarizable_region)
2488 allow_replacements = false;
2489
2490 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2491 allow_replacements = false;
2492
2493 for (child = root->first_child; child; child = child->next_sibling)
2494 {
2495 hole |= covered_to < child->offset;
2496 sth_created |= analyze_access_subtree (child, root,
2497 allow_replacements && !scalar,
2498 totally);
2499
2500 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2501 if (child->grp_covered)
2502 covered_to += child->size;
2503 else
2504 hole = true;
2505 }
2506
2507 if (allow_replacements && scalar && !root->first_child
2508 && (totally || !root->grp_total_scalarization)
2509 && (totally
2510 || root->grp_hint
2511 || ((root->grp_scalar_read || root->grp_assignment_read)
2512 && (root->grp_scalar_write || root->grp_assignment_write))))
2513 {
2514 /* Always create access replacements that cover the whole access.
2515 For integral types this means the precision has to match.
2516 Avoid assumptions based on the integral type kind, too. */
2517 if (INTEGRAL_TYPE_P (root->type)
2518 && (TREE_CODE (root->type) != INTEGER_TYPE
2519 || TYPE_PRECISION (root->type) != root->size)
2520 /* But leave bitfield accesses alone. */
2521 && (TREE_CODE (root->expr) != COMPONENT_REF
2522 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2523 {
2524 tree rt = root->type;
2525 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2526 && (root->size % BITS_PER_UNIT) == 0);
2527 root->type = build_nonstandard_integer_type (root->size,
2528 TYPE_UNSIGNED (rt));
2529 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2530 root->offset, root->reverse,
2531 root->type, NULL, false);
2532
2533 if (dump_file && (dump_flags & TDF_DETAILS))
2534 {
2535 fprintf (dump_file, "Changing the type of a replacement for ");
2536 print_generic_expr (dump_file, root->base);
2537 fprintf (dump_file, " offset: %u, size: %u ",
2538 (unsigned) root->offset, (unsigned) root->size);
2539 fprintf (dump_file, " to an integer.\n");
2540 }
2541 }
2542
2543 root->grp_to_be_replaced = 1;
2544 root->replacement_decl = create_access_replacement (root);
2545 sth_created = true;
2546 hole = false;
2547 }
2548 else
2549 {
2550 if (allow_replacements
2551 && scalar && !root->first_child
2552 && !root->grp_total_scalarization
2553 && (root->grp_scalar_write || root->grp_assignment_write)
2554 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2555 DECL_UID (root->base)))
2556 {
2557 gcc_checking_assert (!root->grp_scalar_read
2558 && !root->grp_assignment_read);
2559 sth_created = true;
2560 if (MAY_HAVE_DEBUG_BIND_STMTS)
2561 {
2562 root->grp_to_be_debug_replaced = 1;
2563 root->replacement_decl = create_access_replacement (root);
2564 }
2565 }
2566
2567 if (covered_to < limit)
2568 hole = true;
2569 if (scalar || !allow_replacements)
2570 root->grp_total_scalarization = 0;
2571 }
2572
2573 if (!hole || totally)
2574 root->grp_covered = 1;
2575 else if (root->grp_write || comes_initialized_p (root->base))
2576 root->grp_unscalarized_data = 1; /* not covered and written to */
2577 return sth_created;
2578 }
2579
2580 /* Analyze all access trees linked by next_grp by means of
2581 analyze_access_subtree. */
2582 static bool
2583 analyze_access_trees (struct access *access)
2584 {
2585 bool ret = false;
2586
2587 while (access)
2588 {
2589 if (analyze_access_subtree (access, NULL, true,
2590 access->grp_total_scalarization))
2591 ret = true;
2592 access = access->next_grp;
2593 }
2594
2595 return ret;
2596 }
2597
2598 /* Return true iff a potential new child of ACC at offset NORM_OFFSET and with size
2599 SIZE would conflict with an already existing one. If exactly such a child
2600 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2601
2602 static bool
2603 child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
2604 HOST_WIDE_INT size, struct access **exact_match)
2605 {
2606 struct access *child;
2607
2608 for (child = acc->first_child; child; child = child->next_sibling)
2609 {
2610 if (child->offset == norm_offset && child->size == size)
2611 {
2612 *exact_match = child;
2613 return true;
2614 }
2615
2616 if (child->offset < norm_offset + size
2617 && child->offset + child->size > norm_offset)
2618 return true;
2619 }
2620
2621 return false;
2622 }
2623
2624 /* Create a new child access of PARENT, with all properties just like MODEL
2625 except for its offset and its grp_read and grp_write flags, which are set
2626 according to SET_GRP_READ and SET_GRP_WRITE. Return the new access. Note
2627 that this access is created long after all splicing and sorting, it's not
2628 located in any access vector and is automatically a representative of its
2629 group. */
2630
2631 static struct access *
2632 create_artificial_child_access (struct access *parent, struct access *model,
2633 HOST_WIDE_INT new_offset,
2634 bool set_grp_read, bool set_grp_write)
2635 {
2636 struct access **child;
2637 tree expr = parent->base;
2638
2639 gcc_assert (!model->grp_unscalarizable_region);
2640
2641 struct access *access = access_pool.allocate ();
2642 memset (access, 0, sizeof (struct access));
2643 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2644 model->type))
2645 {
2646 access->grp_no_warning = true;
2647 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2648 new_offset, model, NULL, false);
2649 }
2650
2651 access->base = parent->base;
2652 access->expr = expr;
2653 access->offset = new_offset;
2654 access->size = model->size;
2655 access->type = model->type;
2656 access->parent = parent;
2657 access->grp_read = set_grp_read;
2658 access->grp_write = set_grp_write;
2659 access->reverse = model->reverse;
2660
2661 child = &parent->first_child;
2662 while (*child && (*child)->offset < new_offset)
2663 child = &(*child)->next_sibling;
2664
2665 access->next_sibling = *child;
2666 *child = access;
2667
2668 return access;
2669 }
2670
2671
2672 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2673 sub-trees as written to. If any of them has not been marked so previously
2674 and has assignment links leading from it, re-enqueue it. */
2675
2676 static void
2677 subtree_mark_written_and_rhs_enqueue (struct access *access)
2678 {
2679 if (access->grp_write)
2680 return;
2681 access->grp_write = true;
2682 add_access_to_rhs_work_queue (access);
2683
2684 struct access *child;
2685 for (child = access->first_child; child; child = child->next_sibling)
2686 subtree_mark_written_and_rhs_enqueue (child);
2687 }
2688
2689 /* If there is still budget to create a propagation access for DECL, return
2690 true and decrement the budget. Otherwise return false. */
2691
2692 static bool
2693 budget_for_propagation_access (tree decl)
2694 {
2695 unsigned b, *p = propagation_budget->get (decl);
2696 if (p)
2697 b = *p;
2698 else
2699 b = param_sra_max_propagations;
2700
2701 if (b == 0)
2702 return false;
2703 b--;
2704
2705 if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
2706 {
2707 fprintf (dump_file, "The propagation budget of ");
2708 print_generic_expr (dump_file, decl);
2709 fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
2710 }
2711 propagation_budget->put (decl, b);
2712 return true;
2713 }
2714
2715 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2716 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2717 propagated transitively. Return true if anything changed. Additionally, if
2718 RACC is a scalar access but LACC is not, change the type of the latter, if
2719 possible. */
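/* As a rough sketch, for an assignment dst = src between two candidate
   aggregates, a scalar sub-access recorded for src on the right hand side is
   replicated as an artificial child of dst's access at the corresponding
   offset, so that the copy can later be performed scalar by scalar. */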
2720
2721 static bool
2722 propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
2723 {
2724 struct access *rchild;
2725 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2726 bool ret = false;
2727
2728 /* If the LHS is still not marked as being written to, we only need to do so
2729 if the RHS at this level actually was. */
2730 if (!lacc->grp_write)
2731 {
2732 gcc_checking_assert (!comes_initialized_p (racc->base));
2733 if (racc->grp_write)
2734 {
2735 subtree_mark_written_and_rhs_enqueue (lacc);
2736 ret = true;
2737 }
2738 }
2739
2740 if (is_gimple_reg_type (lacc->type)
2741 || lacc->grp_unscalarizable_region
2742 || racc->grp_unscalarizable_region)
2743 {
2744 if (!lacc->grp_write)
2745 {
2746 ret = true;
2747 subtree_mark_written_and_rhs_enqueue (lacc);
2748 }
2749 return ret;
2750 }
2751
2752 if (is_gimple_reg_type (racc->type))
2753 {
2754 if (!lacc->grp_write)
2755 {
2756 ret = true;
2757 subtree_mark_written_and_rhs_enqueue (lacc);
2758 }
2759 if (!lacc->first_child && !racc->first_child)
2760 {
2761 tree t = lacc->base;
2762
2763 lacc->type = racc->type;
2764 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2765 lacc->offset, racc->type))
2766 {
2767 lacc->expr = t;
2768 lacc->grp_same_access_path = true;
2769 }
2770 else
2771 {
2772 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2773 lacc->base, lacc->offset,
2774 racc, NULL, false);
2775 lacc->grp_no_warning = true;
2776 lacc->grp_same_access_path = false;
2777 }
2778 }
2779 return ret;
2780 }
2781
2782 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2783 {
2784 struct access *new_acc = NULL;
2785 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2786
2787 if (child_would_conflict_in_acc (lacc, norm_offset, rchild->size,
2788 &new_acc))
2789 {
2790 if (new_acc)
2791 {
2792 if (!new_acc->grp_write && rchild->grp_write)
2793 {
2794 gcc_assert (!lacc->grp_write);
2795 subtree_mark_written_and_rhs_enqueue (new_acc);
2796 ret = true;
2797 }
2798
2799 rchild->grp_hint = 1;
2800 new_acc->grp_hint |= new_acc->grp_read;
2801 if (rchild->first_child
2802 && propagate_subaccesses_from_rhs (new_acc, rchild))
2803 {
2804 ret = 1;
2805 add_access_to_rhs_work_queue (new_acc);
2806 }
2807 }
2808 else
2809 {
2810 if (!lacc->grp_write)
2811 {
2812 ret = true;
2813 subtree_mark_written_and_rhs_enqueue (lacc);
2814 }
2815 }
2816 continue;
2817 }
2818
2819 if (rchild->grp_unscalarizable_region
2820 || !budget_for_propagation_access (lacc->base))
2821 {
2822 if (rchild->grp_write && !lacc->grp_write)
2823 {
2824 ret = true;
2825 subtree_mark_written_and_rhs_enqueue (lacc);
2826 }
2827 continue;
2828 }
2829
2830 rchild->grp_hint = 1;
2831 /* Because get_ref_base_and_extent always includes padding in size for
2832 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2833 type, we might actually be attempting here to create a child of the
2834 same type as the parent. */
2835 if (!types_compatible_p (lacc->type, rchild->type))
2836 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2837 false,
2838 (lacc->grp_write
2839 || rchild->grp_write));
2840 else
2841 new_acc = lacc;
2842 gcc_checking_assert (new_acc);
2843 if (racc->first_child)
2844 propagate_subaccesses_from_rhs (new_acc, rchild);
2845
2846 add_access_to_rhs_work_queue (lacc);
2847 ret = true;
2848 }
2849
2850 return ret;
2851 }
2852
2853 /* Propagate subaccesses of LACC across an assignment link to RACC if they
2854 should inhibit total scalarization of the corresponding area. No flags are
2855 being propagated in the process. Return true if anything changed. */
2856
2857 static bool
2858 propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
2859 {
2860 if (is_gimple_reg_type (racc->type)
2861 || lacc->grp_unscalarizable_region
2862 || racc->grp_unscalarizable_region)
2863 return false;
2864
2865 /* TODO: Do we want to set some new racc flag to stop potential total
2866 scalarization if lacc is a scalar access (and neither of the two has
2867 children)? */
2868
2869 bool ret = false;
2870 HOST_WIDE_INT norm_delta = racc->offset - lacc->offset;
2871 for (struct access *lchild = lacc->first_child;
2872 lchild;
2873 lchild = lchild->next_sibling)
2874 {
2875 struct access *matching_acc = NULL;
2876 HOST_WIDE_INT norm_offset = lchild->offset + norm_delta;
2877
2878 if (lchild->grp_unscalarizable_region
2879 || child_would_conflict_in_acc (racc, norm_offset, lchild->size,
2880 &matching_acc)
2881 || !budget_for_propagation_access (racc->base))
2882 {
2883 if (matching_acc
2884 && propagate_subaccesses_from_lhs (lchild, matching_acc))
2885 add_access_to_lhs_work_queue (matching_acc);
2886 continue;
2887 }
2888
2889 /* Because get_ref_base_and_extent always includes padding in size for
2890 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2891 type, we might actually be attempting here to create a child of the
2892 same type as the parent. */
2893 if (!types_compatible_p (racc->type, lchild->type))
2894 {
2895 struct access *new_acc
2896 = create_artificial_child_access (racc, lchild, norm_offset,
2897 true, false);
2898 propagate_subaccesses_from_lhs (lchild, new_acc);
2899 }
2900 else
2901 propagate_subaccesses_from_lhs (lchild, racc);
2902 ret = true;
2903 }
2904 return ret;
2905 }
2906
2907 /* Propagate all subaccesses across assignment links. */
2908
2909 static void
2910 propagate_all_subaccesses (void)
2911 {
2912 propagation_budget = new hash_map<tree, unsigned>;
2913 while (rhs_work_queue_head)
2914 {
2915 struct access *racc = pop_access_from_rhs_work_queue ();
2916 struct assign_link *link;
2917
2918 if (racc->group_representative)
2919 racc = racc->group_representative;
2920 gcc_assert (racc->first_rhs_link);
2921
2922 for (link = racc->first_rhs_link; link; link = link->next_rhs)
2923 {
2924 struct access *lacc = link->lacc;
2925
2926 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2927 continue;
2928 lacc = lacc->group_representative;
2929
2930 bool reque_parents = false;
2931 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2932 {
2933 if (!lacc->grp_write)
2934 {
2935 subtree_mark_written_and_rhs_enqueue (lacc);
2936 reque_parents = true;
2937 }
2938 }
2939 else if (propagate_subaccesses_from_rhs (lacc, racc))
2940 reque_parents = true;
2941
2942 if (reque_parents)
2943 do
2944 {
2945 add_access_to_rhs_work_queue (lacc);
2946 lacc = lacc->parent;
2947 }
2948 while (lacc);
2949 }
2950 }
2951
2952 while (lhs_work_queue_head)
2953 {
2954 struct access *lacc = pop_access_from_lhs_work_queue ();
2955 struct assign_link *link;
2956
2957 if (lacc->group_representative)
2958 lacc = lacc->group_representative;
2959 gcc_assert (lacc->first_lhs_link);
2960
2961 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2962 continue;
2963
2964 for (link = lacc->first_lhs_link; link; link = link->next_lhs)
2965 {
2966 struct access *racc = link->racc;
2967
2968 if (racc->group_representative)
2969 racc = racc->group_representative;
2970 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2971 continue;
2972 if (propagate_subaccesses_from_lhs (lacc, racc))
2973 add_access_to_lhs_work_queue (racc);
2974 }
2975 }
2976 delete propagation_budget;
2977 }
2978
2979 /* Return true if the forest beginning with ROOT does not contain
2980 unscalarizable regions or non-byte aligned accesses. */
2981
2982 static bool
2983 can_totally_scalarize_forest_p (struct access *root)
2984 {
2985 struct access *access = root;
2986 do
2987 {
2988 if (access->grp_unscalarizable_region
2989 || (access->offset % BITS_PER_UNIT) != 0
2990 || (access->size % BITS_PER_UNIT) != 0
2991 || (is_gimple_reg_type (access->type)
2992 && access->first_child))
2993 return false;
2994
2995 if (access->first_child)
2996 access = access->first_child;
2997 else if (access->next_sibling)
2998 access = access->next_sibling;
2999 else
3000 {
3001 while (access->parent && !access->next_sibling)
3002 access = access->parent;
3003 if (access->next_sibling)
3004 access = access->next_sibling;
3005 else
3006 {
3007 gcc_assert (access == root);
3008 root = root->next_grp;
3009 access = root;
3010 }
3011 }
3012 }
3013 while (access);
3014 return true;
3015 }
3016
3017 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3018 reference EXPR for total scalarization purposes and mark it as such. Within
3019 the children of PARENT, link it in at *PTR with NEXT_SIBLING following it. */
3020
3021 static struct access *
3022 create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
3023 HOST_WIDE_INT size, tree type, tree expr,
3024 struct access **ptr,
3025 struct access *next_sibling)
3026 {
3027 struct access *access = access_pool.allocate ();
3028 memset (access, 0, sizeof (struct access));
3029 access->base = parent->base;
3030 access->offset = pos;
3031 access->size = size;
3032 access->expr = expr;
3033 access->type = type;
3034 access->parent = parent;
3035 access->grp_write = parent->grp_write;
3036 access->grp_total_scalarization = 1;
3037 access->grp_hint = 1;
3038 access->grp_same_access_path = path_comparable_for_same_access (expr);
3039 access->reverse = reverse_storage_order_for_component_p (expr);
3040
3041 access->next_sibling = next_sibling;
3042 *ptr = access;
3043 return access;
3044 }
3045
3046 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3047 reference EXPR for total scalarization purposes and mark it as such, link it
3048 at *PTR and reshape the tree so that those elements at *PTR and their
3049 siblings which fall within the part described by POS and SIZE are moved to
3050 be children of the new access. If a partial overlap is detected, return
3051 NULL. */
3052
3053 static struct access *
3054 create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
3055 HOST_WIDE_INT size, tree type, tree expr,
3056 struct access **ptr)
3057 {
3058 struct access **p = ptr;
3059
3060 while (*p && (*p)->offset < pos + size)
3061 {
3062 if ((*p)->offset + (*p)->size > pos + size)
3063 return NULL;
3064 p = &(*p)->next_sibling;
3065 }
3066
3067 struct access *next_child = *ptr;
3068 struct access *new_acc
3069 = create_total_scalarization_access (parent, pos, size, type, expr,
3070 ptr, *p);
3071 if (p != ptr)
3072 {
3073 new_acc->first_child = next_child;
3074 *p = NULL;
3075 for (struct access *a = next_child; a; a = a->next_sibling)
3076 a->parent = new_acc;
3077 }
3078 return new_acc;
3079 }
3080
3081 static bool totally_scalarize_subtree (struct access *root);
3082
3083 /* Return true if INNER is either the same type as OUTER or if it is the type
3084 of a record field in OUTER at offset zero, possibly in nested
3085 sub-records. */
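/* For example (illustrative only), with OUTER being
   struct O { struct I { int x; } i; }, the function returns true for INNER
   being struct I because that is the type of the field at offset zero, and
   it would keep descending into struct I if that field were itself a
   record. */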
3086
3087 static bool
3088 access_and_field_type_match_p (tree outer, tree inner)
3089 {
3090 if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
3091 return true;
3092 if (TREE_CODE (outer) != RECORD_TYPE)
3093 return false;
3094 tree fld = TYPE_FIELDS (outer);
3095 while (fld)
3096 {
3097 if (TREE_CODE (fld) == FIELD_DECL)
3098 {
3099 if (!zerop (DECL_FIELD_OFFSET (fld)))
3100 return false;
3101 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
3102 return true;
3103 if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3104 fld = TYPE_FIELDS (TREE_TYPE (fld));
3105 else
3106 return false;
3107 }
3108 else
3109 fld = DECL_CHAIN (fld);
3110 }
3111 return false;
3112 }
3113
3114 /* Return type of total_should_skip_creating_access indicating whether a total
3115 scalarization access for a field/element should be created, whether it
3116 already exists or whether the entire total scalarization has to fail. */
3117
3118 enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
3119
3120 /* Do all the necessary steps in total scalarization when a part of the given
3121 aggregate with the given TYPE, POS and SIZE should be put into PARENT and
3122 when we have processed all its siblings with smaller offsets up until and
3123 including LAST_SEEN_SIBLING (which can be NULL).
3124
3125 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3126 appropriate. Return TOTAL_FLD_CREATE if the caller should carry on with
3127 creating a new access, TOTAL_FLD_DONE if an access or accesses capable of
3128 representing the described part of the aggregate for the purposes of total
3129 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3130 prevents total scalarization from happening at all. */
3131
3132 static enum total_sra_field_state
3133 total_should_skip_creating_access (struct access *parent,
3134 struct access **last_seen_sibling,
3135 tree type, HOST_WIDE_INT pos,
3136 HOST_WIDE_INT size)
3137 {
3138 struct access *next_child;
3139 if (!*last_seen_sibling)
3140 next_child = parent->first_child;
3141 else
3142 next_child = (*last_seen_sibling)->next_sibling;
3143
3144 /* First, traverse the chain of siblings until it points to an access with
3145 offset at least equal to POS. Check whether any skipped access spans
3146 the POS boundary and if so, return with a failure. */
3147 while (next_child && next_child->offset < pos)
3148 {
3149 if (next_child->offset + next_child->size > pos)
3150 return TOTAL_FLD_FAILED;
3151 *last_seen_sibling = next_child;
3152 next_child = next_child->next_sibling;
3153 }
3154
3155 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3156 whether it can represent what we need and can be totally scalarized
3157 itself. */
3158 if (next_child && next_child->offset == pos
3159 && next_child->size == size)
3160 {
3161 if (!is_gimple_reg_type (next_child->type)
3162 && (!access_and_field_type_match_p (type, next_child->type)
3163 || !totally_scalarize_subtree (next_child)))
3164 return TOTAL_FLD_FAILED;
3165
3166 *last_seen_sibling = next_child;
3167 return TOTAL_FLD_DONE;
3168 }
3169
3170 /* If the child we're looking at would partially overlap, we just cannot
3171 totally scalarize. */
3172 if (next_child
3173 && next_child->offset < pos + size
3174 && next_child->offset + next_child->size > pos + size)
3175 return TOTAL_FLD_FAILED;
3176
3177 if (is_gimple_reg_type (type))
3178 {
3179 /* We don't scalarize accesses that are children of other scalar type
3180 accesses, so if we go on and create an access for a register type,
3181 there should not be any pre-existing children. There are rare cases
3182 where the requested type is a vector but we already have register
3183 accesses for all its elements which is equally good. Detect that
3184 situation or whether we need to bail out. */
3185
3186 HOST_WIDE_INT covered = pos;
3187 bool skipping = false;
3188 while (next_child
3189 && next_child->offset + next_child->size <= pos + size)
3190 {
3191 if (next_child->offset != covered
3192 || !is_gimple_reg_type (next_child->type))
3193 return TOTAL_FLD_FAILED;
3194
3195 covered += next_child->size;
3196 *last_seen_sibling = next_child;
3197 next_child = next_child->next_sibling;
3198 skipping = true;
3199 }
3200
3201 if (skipping)
3202 {
3203 if (covered != pos + size)
3204 return TOTAL_FLD_FAILED;
3205 else
3206 return TOTAL_FLD_DONE;
3207 }
3208 }
3209
3210 return TOTAL_FLD_CREATE;
3211 }
3212
3213 /* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
3214 spanning all not yet covered areas within ROOT; return false if the attempt
3215 failed. All created accesses will have grp_total_scalarization set (and
3216 should be ignored if the function returns false). */
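/* For instance (a sketch), when totally scalarizing a variable of type
   struct { int i; float f[2]; } for which only the access for .i exists,
   this creates the missing accesses for f[0] and f[1], so that every scalar
   leaf of the aggregate ends up with an access of its own. */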
3217
3218 static bool
3219 totally_scalarize_subtree (struct access *root)
3220 {
3221 gcc_checking_assert (!root->grp_unscalarizable_region);
3222 gcc_checking_assert (!is_gimple_reg_type (root->type));
3223
3224 struct access *last_seen_sibling = NULL;
3225
3226 switch (TREE_CODE (root->type))
3227 {
3228 case RECORD_TYPE:
3229 for (tree fld = TYPE_FIELDS (root->type); fld; fld = DECL_CHAIN (fld))
3230 if (TREE_CODE (fld) == FIELD_DECL)
3231 {
3232 tree ft = TREE_TYPE (fld);
3233 HOST_WIDE_INT fsize = tree_to_uhwi (DECL_SIZE (fld));
3234 if (!fsize)
3235 continue;
3236
3237 HOST_WIDE_INT pos = root->offset + int_bit_position (fld);
3238 enum total_sra_field_state
3239 state = total_should_skip_creating_access (root,
3240 &last_seen_sibling,
3241 ft, pos, fsize);
3242 switch (state)
3243 {
3244 case TOTAL_FLD_FAILED:
3245 return false;
3246 case TOTAL_FLD_DONE:
3247 continue;
3248 case TOTAL_FLD_CREATE:
3249 break;
3250 default:
3251 gcc_unreachable ();
3252 }
3253
3254 struct access **p = (last_seen_sibling
3255 ? &last_seen_sibling->next_sibling
3256 : &root->first_child);
3257 tree nref = build3 (COMPONENT_REF, ft, root->expr, fld, NULL_TREE);
3258 struct access *new_child
3259 = create_total_access_and_reshape (root, pos, fsize, ft, nref, p);
3260 if (!new_child)
3261 return false;
3262
3263 if (!is_gimple_reg_type (ft)
3264 && !totally_scalarize_subtree (new_child))
3265 return false;
3266 last_seen_sibling = new_child;
3267 }
3268 break;
3269 case ARRAY_TYPE:
3270 {
3271 tree elemtype = TREE_TYPE (root->type);
3272 tree elem_size = TYPE_SIZE (elemtype);
3273 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
3274 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
3275 gcc_assert (el_size > 0);
3276
3277 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (root->type));
3278 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
3279 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (root->type));
3280 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3281 if (!maxidx)
3282 goto out;
3283 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
3284 tree domain = TYPE_DOMAIN (root->type);
3285 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3286 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3287 offset_int idx = wi::to_offset (minidx);
3288 offset_int max = wi::to_offset (maxidx);
3289 if (!TYPE_UNSIGNED (domain))
3290 {
3291 idx = wi::sext (idx, TYPE_PRECISION (domain));
3292 max = wi::sext (max, TYPE_PRECISION (domain));
3293 }
3294 for (HOST_WIDE_INT pos = root->offset;
3295 idx <= max;
3296 pos += el_size, ++idx)
3297 {
3298 enum total_sra_field_state
3299 state = total_should_skip_creating_access (root,
3300 &last_seen_sibling,
3301 elemtype, pos,
3302 el_size);
3303 switch (state)
3304 {
3305 case TOTAL_FLD_FAILED:
3306 return false;
3307 case TOTAL_FLD_DONE:
3308 continue;
3309 case TOTAL_FLD_CREATE:
3310 break;
3311 default:
3312 gcc_unreachable ();
3313 }
3314
3315 struct access **p = (last_seen_sibling
3316 ? &last_seen_sibling->next_sibling
3317 : &root->first_child);
3318 tree nref = build4 (ARRAY_REF, elemtype, root->expr,
3319 wide_int_to_tree (domain, idx),
3320 NULL_TREE, NULL_TREE);
3321 struct access *new_child
3322 = create_total_access_and_reshape (root, pos, el_size, elemtype,
3323 nref, p);
3324 if (!new_child)
3325 return false;
3326
3327 if (!is_gimple_reg_type (elemtype)
3328 && !totally_scalarize_subtree (new_child))
3329 return false;
3330 last_seen_sibling = new_child;
3331 }
3332 }
3333 break;
3334 default:
3335 gcc_unreachable ();
3336 }
3337
3338 out:
3339 return true;
3340 }
3341
3342 /* Go through all accesses collected throughout the (intraprocedural) analysis
3343 stage, exclude overlapping ones, identify representatives and build trees
3344 out of them, making decisions about scalarization on the way. Return true
3345 iff there are any to-be-scalarized variables after this stage. */
3346
3347 static bool
3348 analyze_all_variable_accesses (void)
3349 {
3350 int res = 0;
3351 bitmap tmp = BITMAP_ALLOC (NULL);
3352 bitmap_iterator bi;
3353 unsigned i;
3354
3355 bitmap_copy (tmp, candidate_bitmap);
3356 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3357 {
3358 tree var = candidate (i);
3359 struct access *access;
3360
3361 access = sort_and_splice_var_accesses (var);
3362 if (!access || !build_access_trees (access))
3363 disqualify_candidate (var,
3364 "No or inhibitingly overlapping accesses.");
3365 }
3366
3367 propagate_all_subaccesses ();
3368
3369 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
3370 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3371 fall back to a target default. */
3372 unsigned HOST_WIDE_INT max_scalarization_size
3373 = get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
3374
3375 if (optimize_speed_p)
3376 {
3377 if (global_options_set.x_param_sra_max_scalarization_size_speed)
3378 max_scalarization_size = param_sra_max_scalarization_size_speed;
3379 }
3380 else
3381 {
3382 if (global_options_set.x_param_sra_max_scalarization_size_size)
3383 max_scalarization_size = param_sra_max_scalarization_size_size;
3384 }
3385 max_scalarization_size *= BITS_PER_UNIT;
3386
3387 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3388 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3389 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3390 {
3391 tree var = candidate (i);
3392 if (!VAR_P (var))
3393 continue;
3394
3395 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))) > max_scalarization_size)
3396 {
3397 if (dump_file && (dump_flags & TDF_DETAILS))
3398 {
3399 fprintf (dump_file, "Too big to totally scalarize: ");
3400 print_generic_expr (dump_file, var);
3401 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
3402 }
3403 continue;
3404 }
3405
3406 bool all_types_ok = true;
3407 for (struct access *access = get_first_repr_for_decl (var);
3408 access;
3409 access = access->next_grp)
3410 if (!can_totally_scalarize_forest_p (access)
3411 || !scalarizable_type_p (access->type, constant_decl_p (var)))
3412 {
3413 all_types_ok = false;
3414 break;
3415 }
3416 if (!all_types_ok)
3417 continue;
3418
3419 if (dump_file && (dump_flags & TDF_DETAILS))
3420 {
3421 fprintf (dump_file, "Will attempt to totally scalarize ");
3422 print_generic_expr (dump_file, var);
3423 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3424 }
3425 bool scalarized = true;
3426 for (struct access *access = get_first_repr_for_decl (var);
3427 access;
3428 access = access->next_grp)
3429 if (!is_gimple_reg_type (access->type)
3430 && !totally_scalarize_subtree (access))
3431 {
3432 scalarized = false;
3433 break;
3434 }
3435
3436 if (scalarized)
3437 for (struct access *access = get_first_repr_for_decl (var);
3438 access;
3439 access = access->next_grp)
3440 access->grp_total_scalarization = true;
3441 }
3442
3443 if (flag_checking)
3444 verify_all_sra_access_forests ();
3445
3446 bitmap_copy (tmp, candidate_bitmap);
3447 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3448 {
3449 tree var = candidate (i);
3450 struct access *access = get_first_repr_for_decl (var);
3451
3452 if (analyze_access_trees (access))
3453 {
3454 res++;
3455 if (dump_file && (dump_flags & TDF_DETAILS))
3456 {
3457 fprintf (dump_file, "\nAccess trees for ");
3458 print_generic_expr (dump_file, var);
3459 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3460 dump_access_tree (dump_file, access);
3461 fprintf (dump_file, "\n");
3462 }
3463 }
3464 else
3465 disqualify_candidate (var, "No scalar replacements to be created.");
3466 }
3467
3468 BITMAP_FREE (tmp);
3469
3470 if (res)
3471 {
3472 statistics_counter_event (cfun, "Scalarized aggregates", res);
3473 return true;
3474 }
3475 else
3476 return false;
3477 }
3478
3479 /* Generate statements copying scalar replacements of accesses within a subtree
3480 into or out of AGG. ACCESS, all its children, siblings and their children
3481 are to be processed. AGG is an aggregate type expression (can be a
3482 declaration but does not have to be, it can for example also be a mem_ref or
3483 a series of handled components). TOP_OFFSET is the offset of the processed
3484 subtree which has to be subtracted from offsets of individual accesses to
3485 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
3486 replacements in the interval <start_offset, start_offset + chunk_size>,
3487 otherwise copy all. GSI is a statement iterator used to place the new
3488 statements. WRITE should be true when the statements should write from AGG
3489 to the replacement and false if vice versa. If INSERT_AFTER is true, new
3490 statements will be added after the current statement in GSI, they will be
3491 added before the statement otherwise. */
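/* For example (illustrative only), with WRITE true this emits assignments of
   the form SR.5 = agg.fld for every to-be-replaced access in the subtree,
   loading the aggregate parts into their scalar replacements; with WRITE
   false the operands are swapped and the replacements are stored back into
   AGG. */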
3492
3493 static void
3494 generate_subtree_copies (struct access *access, tree agg,
3495 HOST_WIDE_INT top_offset,
3496 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
3497 gimple_stmt_iterator *gsi, bool write,
3498 bool insert_after, location_t loc)
3499 {
3500 /* Never write anything into constant pool decls. See PR70602. */
3501 if (!write && constant_decl_p (agg))
3502 return;
3503 do
3504 {
3505 if (chunk_size && access->offset >= start_offset + chunk_size)
3506 return;
3507
3508 if (access->grp_to_be_replaced
3509 && (chunk_size == 0
3510 || access->offset + access->size > start_offset))
3511 {
3512 tree expr, repl = get_access_replacement (access);
3513 gassign *stmt;
3514
3515 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
3516 access, gsi, insert_after);
3517
3518 if (write)
3519 {
3520 if (access->grp_partial_lhs)
3521 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
3522 !insert_after,
3523 insert_after ? GSI_NEW_STMT
3524 : GSI_SAME_STMT);
3525 stmt = gimple_build_assign (repl, expr);
3526 }
3527 else
3528 {
3529 TREE_NO_WARNING (repl) = 1;
3530 if (access->grp_partial_lhs)
3531 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3532 !insert_after,
3533 insert_after ? GSI_NEW_STMT
3534 : GSI_SAME_STMT);
3535 stmt = gimple_build_assign (expr, repl);
3536 }
3537 gimple_set_location (stmt, loc);
3538
3539 if (insert_after)
3540 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3541 else
3542 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3543 update_stmt (stmt);
3544 sra_stats.subtree_copies++;
3545 }
3546 else if (write
3547 && access->grp_to_be_debug_replaced
3548 && (chunk_size == 0
3549 || access->offset + access->size > start_offset))
3550 {
3551 gdebug *ds;
3552 tree drhs = build_debug_ref_for_model (loc, agg,
3553 access->offset - top_offset,
3554 access);
3555 ds = gimple_build_debug_bind (get_access_replacement (access),
3556 drhs, gsi_stmt (*gsi));
3557 if (insert_after)
3558 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3559 else
3560 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3561 }
3562
3563 if (access->first_child)
3564 generate_subtree_copies (access->first_child, agg, top_offset,
3565 start_offset, chunk_size, gsi,
3566 write, insert_after, loc);
3567
3568 access = access->next_sibling;
3569 }
3570 while (access);
3571 }
3572
3573 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3574 root of the subtree to be processed. GSI is the statement iterator used
3575 for inserting statements which are added after the current statement if
3576 INSERT_AFTER is true or before it otherwise. */
3577
3578 static void
3579 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3580 bool insert_after, location_t loc)
3581
3582 {
3583 struct access *child;
3584
3585 if (access->grp_to_be_replaced)
3586 {
3587 gassign *stmt;
3588
3589 stmt = gimple_build_assign (get_access_replacement (access),
3590 build_zero_cst (access->type));
3591 if (insert_after)
3592 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3593 else
3594 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3595 update_stmt (stmt);
3596 gimple_set_location (stmt, loc);
3597 }
3598 else if (access->grp_to_be_debug_replaced)
3599 {
3600 gdebug *ds
3601 = gimple_build_debug_bind (get_access_replacement (access),
3602 build_zero_cst (access->type),
3603 gsi_stmt (*gsi));
3604 if (insert_after)
3605 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3606 else
3607 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3608 }
3609
3610 for (child = access->first_child; child; child = child->next_sibling)
3611 init_subtree_with_zero (child, gsi, insert_after, loc);
3612 }
3613
3614 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3615 root of the subtree to be processed. GSI is the statement iterator used
3616 for inserting statements which are added after the current statement if
3617 INSERT_AFTER is true or before it otherwise. */
3618
3619 static void
3620 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3621 bool insert_after, location_t loc)
3622
3623 {
3624 struct access *child;
3625
3626 if (access->grp_to_be_replaced)
3627 {
3628 tree rep = get_access_replacement (access);
3629 tree clobber = build_clobber (access->type);
3630 gimple *stmt = gimple_build_assign (rep, clobber);
3631
3632 if (insert_after)
3633 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3634 else
3635 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3636 update_stmt (stmt);
3637 gimple_set_location (stmt, loc);
3638 }
3639
3640 for (child = access->first_child; child; child = child->next_sibling)
3641 clobber_subtree (child, gsi, insert_after, loc);
3642 }
3643
3644 /* Search for an access representative for the given expression EXPR and
3645 return it or NULL if it cannot be found. */
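/* For example (a hypothetical declaration, assuming a 32-bit int and no
   padding): for the expression p.b with

     struct pair { int a; int b; } p;

   get_ref_base_and_extent returns base P, bit offset 32 and size 32, and the
   access representative recorded for that base/offset/size triple during
   scanning is looked up and returned.  */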
3646
3647 static struct access *
3648 get_access_for_expr (tree expr)
3649 {
3650 poly_int64 poffset, psize, pmax_size;
3651 HOST_WIDE_INT offset, max_size;
3652 tree base;
3653 bool reverse;
3654
3655 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3656 a different size than the size of its argument and we need the latter
3657 one. */
3658 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3659 expr = TREE_OPERAND (expr, 0);
3660
3661 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3662 &reverse);
3663 if (!known_size_p (pmax_size)
3664 || !pmax_size.is_constant (&max_size)
3665 || !poffset.is_constant (&offset)
3666 || !DECL_P (base))
3667 return NULL;
3668
3669 if (tree basesize = DECL_SIZE (base))
3670 {
3671 poly_int64 sz;
3672 if (offset < 0
3673 || !poly_int_tree_p (basesize, &sz)
3674 || known_le (sz, offset))
3675 return NULL;
3676 }
3677
3678 if (max_size == 0
3679 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3680 return NULL;
3681
3682 return get_var_base_offset_size_access (base, offset, max_size);
3683 }
3684
3685 /* Replace the expression EXPR with a scalar replacement if there is one and
3686 generate other statements to do type conversion or subtree copying if
3687 necessary. GSI is used to place newly created statements, WRITE is true if
3688 the expression is being written to (it is on the LHS of a statement or an
3689 output in an assembly statement). */
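/* A rough sketch of the effect (P and p$a are illustrative names only): when
   p.a has the scalar replacement p$a and the types agree, the reference is
   simply rewritten to p$a.  When the types differ, the original reference is
   kept and an extra statement copying between it and the replacement is
   inserted (a VIEW_CONVERT_EXPR is used for partial complex accesses), which
   is what the code below implements.  */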
3690
3691 static bool
3692 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3693 {
3694 location_t loc;
3695 struct access *access;
3696 tree type, bfr, orig_expr;
3697 bool partial_cplx_access = false;
3698
3699 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3700 {
3701 bfr = *expr;
3702 expr = &TREE_OPERAND (*expr, 0);
3703 }
3704 else
3705 bfr = NULL_TREE;
3706
3707 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3708 {
3709 expr = &TREE_OPERAND (*expr, 0);
3710 partial_cplx_access = true;
3711 }
3712 access = get_access_for_expr (*expr);
3713 if (!access)
3714 return false;
3715 type = TREE_TYPE (*expr);
3716 orig_expr = *expr;
3717
3718 loc = gimple_location (gsi_stmt (*gsi));
3719 gimple_stmt_iterator alt_gsi = gsi_none ();
3720 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3721 {
3722 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3723 gsi = &alt_gsi;
3724 }
3725
3726 if (access->grp_to_be_replaced)
3727 {
3728 tree repl = get_access_replacement (access);
3729 /* If we replace a non-register typed access simply use the original
3730 access expression to extract the scalar component afterwards.
3731 This happens if scalarizing a function return value or parameter
3732 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3733 gcc.c-torture/compile/20011217-1.c.
3734
3735 We also want to use this when accessing a complex or vector which can
3736 be accessed as a different type too, potentially creating a need for
3737 type conversion (see PR42196) and when scalarized unions are involved
3738 in assembler statements (see PR42398). */
3739 if (!bfr && !useless_type_conversion_p (type, access->type))
3740 {
3741 tree ref;
3742
3743 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3744
3745 if (partial_cplx_access)
3746 {
3747 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
3748 the case of a write because in such case the replacement cannot
3749 be a gimple register. In the case of a load, we have to
3750 differentiate between a register and a non-register
3751 replacement. */
3752 tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
3753 gcc_checking_assert (!write || access->grp_partial_lhs);
3754 if (!access->grp_partial_lhs)
3755 {
3756 tree tmp = make_ssa_name (type);
3757 gassign *stmt = gimple_build_assign (tmp, t);
3758 /* This is always a read. */
3759 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3760 t = tmp;
3761 }
3762 *expr = t;
3763 }
3764 else if (write)
3765 {
3766 gassign *stmt;
3767
3768 if (access->grp_partial_lhs)
3769 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3770 false, GSI_NEW_STMT);
3771 stmt = gimple_build_assign (repl, ref);
3772 gimple_set_location (stmt, loc);
3773 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3774 }
3775 else
3776 {
3777 gassign *stmt;
3778
3779 if (access->grp_partial_lhs)
3780 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3781 true, GSI_SAME_STMT);
3782 stmt = gimple_build_assign (ref, repl);
3783 gimple_set_location (stmt, loc);
3784 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3785 }
3786 }
3787 else
3788 *expr = repl;
3789 sra_stats.exprs++;
3790 }
3791 else if (write && access->grp_to_be_debug_replaced)
3792 {
3793 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3794 NULL_TREE,
3795 gsi_stmt (*gsi));
3796 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3797 }
3798
3799 if (access->first_child)
3800 {
3801 HOST_WIDE_INT start_offset, chunk_size;
3802 if (bfr
3803 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3804 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3805 {
3806 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3807 start_offset = access->offset
3808 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3809 }
3810 else
3811 start_offset = chunk_size = 0;
3812
3813 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3814 start_offset, chunk_size, gsi, write, write,
3815 loc);
3816 }
3817 return true;
3818 }
3819
3820 /* Where scalar replacements of the RHS have been written to when a replacement
3821 of the LHS of an assignment cannot be directly loaded from a replacement of
3822 the RHS. */
3823 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3824 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3825 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3826
3827 struct subreplacement_assignment_data
3828 {
3829 /* Offset of the access representing the lhs of the assignment. */
3830 HOST_WIDE_INT left_offset;
3831
3832 /* LHS and RHS of the original assignment. */
3833 tree assignment_lhs, assignment_rhs;
3834
3835 /* Access representing the rhs of the whole assignment. */
3836 struct access *top_racc;
3837
3838 /* Stmt iterator used for statement insertions after the original assignment.
3839 It points to the main GSI used to traverse a BB during function body
3840 modification. */
3841 gimple_stmt_iterator *new_gsi;
3842
3843 /* Stmt iterator used for statement insertions before the original
3844 assignment. Keeps on pointing to the original statement. */
3845 gimple_stmt_iterator old_gsi;
3846
3847 /* Location of the assignment. */
3848 location_t loc;
3849
3850 /* Keeps the information whether we have needed to refresh replacements of
3851 the LHS and from which side of the assignments this takes place. */
3852 enum unscalarized_data_handling refreshed;
3853 };
3854
3855 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3856 their base aggregate if there are unscalarized data or directly to the LHS of
3857 the assignment statement described by SAD otherwise. */
3858
3859 static void
3860 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3861 {
3862 tree src;
3863 if (sad->top_racc->grp_unscalarized_data)
3864 {
3865 src = sad->assignment_rhs;
3866 sad->refreshed = SRA_UDH_RIGHT;
3867 }
3868 else
3869 {
3870 src = sad->assignment_lhs;
3871 sad->refreshed = SRA_UDH_LEFT;
3872 }
3873 generate_subtree_copies (sad->top_racc->first_child, src,
3874 sad->top_racc->offset, 0, 0,
3875 &sad->old_gsi, false, false, sad->loc);
3876 }
3877
3878 /* Try to generate statements to load all sub-replacements in an access subtree
3879 formed by children of LACC from scalar replacements in the SAD->top_racc
3880 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3881 and load the accesses from it. */
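/* Illustrative sketch (hypothetical types and replacement names): for an
   aggregate assignment

     struct pair { int a; int b; };
     struct pair l, r;
     ...
     l = r;

   where both sides are scalarized, walking the children of the LHS access
   yields

     l$a = r$a;
     l$b = r$b;

   inserted after the original statement; when a matching RHS replacement
   cannot be found, the value is loaded from an aggregate instead, possibly
   after refreshing it via handle_unscalarized_data_in_subtree.  */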
3882
3883 static void
3884 load_assign_lhs_subreplacements (struct access *lacc,
3885 struct subreplacement_assignment_data *sad)
3886 {
3887 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3888 {
3889 HOST_WIDE_INT offset;
3890 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3891
3892 if (lacc->grp_to_be_replaced)
3893 {
3894 struct access *racc;
3895 gassign *stmt;
3896 tree rhs;
3897
3898 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3899 if (racc && racc->grp_to_be_replaced)
3900 {
3901 rhs = get_access_replacement (racc);
3902 if (!useless_type_conversion_p (lacc->type, racc->type))
3903 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3904 lacc->type, rhs);
3905
3906 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3907 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3908 NULL_TREE, true, GSI_SAME_STMT);
3909 }
3910 else
3911 {
3912 /* No suitable access on the right hand side, need to load from
3913 the aggregate. See if we have to update it first... */
3914 if (sad->refreshed == SRA_UDH_NONE)
3915 handle_unscalarized_data_in_subtree (sad);
3916
3917 if (sad->refreshed == SRA_UDH_LEFT)
3918 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3919 lacc->offset - sad->left_offset,
3920 lacc, sad->new_gsi, true);
3921 else
3922 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3923 lacc->offset - sad->left_offset,
3924 lacc, sad->new_gsi, true);
3925 if (lacc->grp_partial_lhs)
3926 rhs = force_gimple_operand_gsi (sad->new_gsi,
3927 rhs, true, NULL_TREE,
3928 false, GSI_NEW_STMT);
3929 }
3930
3931 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3932 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3933 gimple_set_location (stmt, sad->loc);
3934 update_stmt (stmt);
3935 sra_stats.subreplacements++;
3936 }
3937 else
3938 {
3939 if (sad->refreshed == SRA_UDH_NONE
3940 && lacc->grp_read && !lacc->grp_covered)
3941 handle_unscalarized_data_in_subtree (sad);
3942
3943 if (lacc && lacc->grp_to_be_debug_replaced)
3944 {
3945 gdebug *ds;
3946 tree drhs;
3947 struct access *racc = find_access_in_subtree (sad->top_racc,
3948 offset,
3949 lacc->size);
3950
3951 if (racc && racc->grp_to_be_replaced)
3952 {
3953 if (racc->grp_write || constant_decl_p (racc->base))
3954 drhs = get_access_replacement (racc);
3955 else
3956 drhs = NULL;
3957 }
3958 else if (sad->refreshed == SRA_UDH_LEFT)
3959 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3960 lacc->offset, lacc);
3961 else if (sad->refreshed == SRA_UDH_RIGHT)
3962 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3963 offset, lacc);
3964 else
3965 drhs = NULL_TREE;
3966 if (drhs
3967 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3968 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3969 lacc->type, drhs);
3970 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3971 drhs, gsi_stmt (sad->old_gsi));
3972 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3973 }
3974 }
3975
3976 if (lacc->first_child)
3977 load_assign_lhs_subreplacements (lacc, sad);
3978 }
3979 }
3980
3981 /* Result code for SRA assignment modification. */
3982 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3983 SRA_AM_MODIFIED, /* stmt changed but not
3984 removed */
3985 SRA_AM_REMOVED }; /* stmt eliminated */
3986
3987 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3988 to the assignment and GSI is the statement iterator pointing at it. Returns
3989 the same values as sra_modify_assign. */
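/* As an illustration (hypothetical source), an assignment in GIMPLE form

     p = {};

   where p is a fully covered candidate has its replacements zero-initialized
   by init_subtree_with_zero and the statement itself removed, while a clobber
   of a fully scalarized p is dropped after clobber_subtree has clobbered the
   replacements.  */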
3990
3991 static enum assignment_mod_result
3992 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3993 {
3994 tree lhs = gimple_assign_lhs (stmt);
3995 struct access *acc = get_access_for_expr (lhs);
3996 if (!acc)
3997 return SRA_AM_NONE;
3998 location_t loc = gimple_location (stmt);
3999
4000 if (gimple_clobber_p (stmt))
4001 {
4002 /* Clobber the replacement variable. */
4003 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
4004 /* Remove clobbers of fully scalarized variables, they are dead. */
4005 if (acc->grp_covered)
4006 {
4007 unlink_stmt_vdef (stmt);
4008 gsi_remove (gsi, true);
4009 release_defs (stmt);
4010 return SRA_AM_REMOVED;
4011 }
4012 else
4013 return SRA_AM_MODIFIED;
4014 }
4015
4016 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
4017 {
4018 /* I have never seen this code path trigger but if it can happen the
4019 following should handle it gracefully. */
4020 if (access_has_children_p (acc))
4021 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
4022 true, true, loc);
4023 return SRA_AM_MODIFIED;
4024 }
4025
4026 if (acc->grp_covered)
4027 {
4028 init_subtree_with_zero (acc, gsi, false, loc);
4029 unlink_stmt_vdef (stmt);
4030 gsi_remove (gsi, true);
4031 release_defs (stmt);
4032 return SRA_AM_REMOVED;
4033 }
4034 else
4035 {
4036 init_subtree_with_zero (acc, gsi, true, loc);
4037 return SRA_AM_MODIFIED;
4038 }
4039 }
4040
4041 /* Create and return a new suitable default definition SSA_NAME for RACC which
4042 is an access describing an uninitialized part of an aggregate that is being
4043 loaded. REG_TYPE is used instead of the actual RACC type if that is not of
4044 a gimple register type. */
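/* Minimal sketch of when this is used (hypothetical names): for

     struct pair p;
     int x = p.a;

   where the accessed part of P was never written and has no scalar
   replacement, the load can be rewritten to use a default-definition
   SSA_NAME of a freshly created replacement, representing the uninitialized
   value, instead of reading from memory.  */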
4045
4046 static tree
4047 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
4048 {
4049 gcc_checking_assert (!racc->grp_to_be_replaced
4050 && !racc->grp_to_be_debug_replaced);
4051 if (!racc->replacement_decl)
4052 racc->replacement_decl = create_access_replacement (racc, reg_type);
4053 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
4054 }
4055
4056 /* Examine both sides of the assignment statement pointed to by STMT, replace
4057 them with a scalar replacement if there is one and generate copying of
4058 replacements if scalarized aggregates have been used in the assignment. GSI
4059 is used to hold generated statements for type conversions and subtree
4060 copying. */
4061
4062 static enum assignment_mod_result
4063 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4064 {
4065 struct access *lacc, *racc;
4066 tree lhs, rhs;
4067 bool modify_this_stmt = false;
4068 bool force_gimple_rhs = false;
4069 location_t loc;
4070 gimple_stmt_iterator orig_gsi = *gsi;
4071
4072 if (!gimple_assign_single_p (stmt))
4073 return SRA_AM_NONE;
4074 lhs = gimple_assign_lhs (stmt);
4075 rhs = gimple_assign_rhs1 (stmt);
4076
4077 if (TREE_CODE (rhs) == CONSTRUCTOR)
4078 return sra_modify_constructor_assign (stmt, gsi);
4079
4080 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
4081 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
4082 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
4083 {
4084 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
4085 gsi, false);
4086 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
4087 gsi, true);
4088 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4089 }
4090
4091 lacc = get_access_for_expr (lhs);
4092 racc = get_access_for_expr (rhs);
4093 if (!lacc && !racc)
4094 return SRA_AM_NONE;
4095 /* Avoid modifying initializations of constant-pool replacements. */
4096 if (racc && (racc->replacement_decl == lhs))
4097 return SRA_AM_NONE;
4098
4099 loc = gimple_location (stmt);
4100 if (lacc && lacc->grp_to_be_replaced)
4101 {
4102 lhs = get_access_replacement (lacc);
4103 gimple_assign_set_lhs (stmt, lhs);
4104 modify_this_stmt = true;
4105 if (lacc->grp_partial_lhs)
4106 force_gimple_rhs = true;
4107 sra_stats.exprs++;
4108 }
4109
4110 if (racc && racc->grp_to_be_replaced)
4111 {
4112 rhs = get_access_replacement (racc);
4113 modify_this_stmt = true;
4114 if (racc->grp_partial_lhs)
4115 force_gimple_rhs = true;
4116 sra_stats.exprs++;
4117 }
4118 else if (racc
4119 && !racc->grp_unscalarized_data
4120 && !racc->grp_unscalarizable_region
4121 && TREE_CODE (lhs) == SSA_NAME
4122 && !access_has_replacements_p (racc))
4123 {
4124 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
4125 modify_this_stmt = true;
4126 sra_stats.exprs++;
4127 }
4128
4129 if (modify_this_stmt)
4130 {
4131 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4132 {
4133 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
4134 ??? This should move to fold_stmt which we simply should
4135 call after building a VIEW_CONVERT_EXPR here. */
4136 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
4137 && !contains_bitfld_component_ref_p (lhs))
4138 {
4139 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
4140 gimple_assign_set_lhs (stmt, lhs);
4141 }
4142 else if (lacc
4143 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
4144 && !contains_vce_or_bfcref_p (rhs))
4145 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
4146
4147 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4148 {
4149 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
4150 rhs);
4151 if (is_gimple_reg_type (TREE_TYPE (lhs))
4152 && TREE_CODE (lhs) != SSA_NAME)
4153 force_gimple_rhs = true;
4154 }
4155 }
4156 }
4157
4158 if (lacc && lacc->grp_to_be_debug_replaced)
4159 {
4160 tree dlhs = get_access_replacement (lacc);
4161 tree drhs = unshare_expr (rhs);
4162 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
4163 {
4164 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
4165 && !contains_vce_or_bfcref_p (drhs))
4166 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
4167 if (drhs
4168 && !useless_type_conversion_p (TREE_TYPE (dlhs),
4169 TREE_TYPE (drhs)))
4170 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
4171 TREE_TYPE (dlhs), drhs);
4172 }
4173 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
4174 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
4175 }
4176
4177 /* From this point on, the function deals with assignments between
4178 aggregates when at least one has scalar reductions of some of its
4179 components. There are three possible scenarios: 1) both the LHS and RHS
4180 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS does.
4181
4182 In the first case, we would like to load the LHS components from RHS
4183 components whenever possible. If that is not possible, we would like to
4184 read it directly from the RHS (after updating it by storing in it its own
4185 components). If there are some necessary unscalarized data in the LHS,
4186 those will be loaded by the original assignment too. If neither of these
4187 cases happen, the original statement can be removed. Most of this is done
4188 by load_assign_lhs_subreplacements.
4189
4190 In the second case, we would like to store all RHS scalarized components
4191 directly into LHS and if they cover the aggregate completely, remove the
4192 statement too. In the third case, we want the LHS components to be loaded
4193 directly from the RHS (DSE will remove the original statement if it
4194 becomes redundant).
4195
4196 This is a bit complex but manageable when types match and when unions do
4197 not cause confusion in a way that we cannot really load a component of LHS
4198 from the RHS or vice versa (the access representing this level can have
4199 subaccesses that are accessible only through a different union field at a
4200 higher level - different from the one used in the examined expression).
4201 Unions are fun.
4202
4203 Therefore, I specially handle a fourth case, happening when there is a
4204 specific type cast or it is impossible to locate a scalarized subaccess on
4205 the other side of the expression. If that happens, I simply "refresh" the
4206 RHS by storing its scalarized components back into it, leave the original
4207 statement there to do the copying and then load the scalar replacements of
4208 the LHS. This is what the first branch does. */
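/* A hedged example of the cases described above (types and names are made up
   for illustration): for

     struct pair { int a; int b; };
     struct pair l, r;
     ...
     l = r;

   case 1) loads l$a and l$b from r$a and r$b where possible, case 2) stores
   r$a and r$b into l and may remove the copy, and case 3) reloads l$a and
   l$b from r after it.  The fourth, "refresh" case first stores the RHS
   replacements back into r, keeps the aggregate copy and then reloads the
   LHS replacements from it.  */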
4209
4210 if (modify_this_stmt
4211 || gimple_has_volatile_ops (stmt)
4212 || contains_vce_or_bfcref_p (rhs)
4213 || contains_vce_or_bfcref_p (lhs)
4214 || stmt_ends_bb_p (stmt))
4215 {
4216 /* No need to copy into a constant-pool, it comes pre-initialized. */
4217 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
4218 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4219 gsi, false, false, loc);
4220 if (access_has_children_p (lacc))
4221 {
4222 gimple_stmt_iterator alt_gsi = gsi_none ();
4223 if (stmt_ends_bb_p (stmt))
4224 {
4225 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
4226 gsi = &alt_gsi;
4227 }
4228 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
4229 gsi, true, true, loc);
4230 }
4231 sra_stats.separate_lhs_rhs_handling++;
4232
4233 /* This gimplification must be done after generate_subtree_copies,
4234 lest we insert the subtree copies in the middle of the gimplified
4235 sequence. */
4236 if (force_gimple_rhs)
4237 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
4238 true, GSI_SAME_STMT);
4239 if (gimple_assign_rhs1 (stmt) != rhs)
4240 {
4241 modify_this_stmt = true;
4242 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
4243 gcc_assert (stmt == gsi_stmt (orig_gsi));
4244 }
4245
4246 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4247 }
4248 else
4249 {
4250 if (access_has_children_p (lacc)
4251 && access_has_children_p (racc)
4252 /* When an access represents an unscalarizable region, it usually
4253 represents accesses with variable offset and thus must not be used
4254 to generate new memory accesses. */
4255 && !lacc->grp_unscalarizable_region
4256 && !racc->grp_unscalarizable_region)
4257 {
4258 struct subreplacement_assignment_data sad;
4259
4260 sad.left_offset = lacc->offset;
4261 sad.assignment_lhs = lhs;
4262 sad.assignment_rhs = rhs;
4263 sad.top_racc = racc;
4264 sad.old_gsi = *gsi;
4265 sad.new_gsi = gsi;
4266 sad.loc = gimple_location (stmt);
4267 sad.refreshed = SRA_UDH_NONE;
4268
4269 if (lacc->grp_read && !lacc->grp_covered)
4270 handle_unscalarized_data_in_subtree (&sad);
4271
4272 load_assign_lhs_subreplacements (lacc, &sad);
4273 if (sad.refreshed != SRA_UDH_RIGHT)
4274 {
4275 gsi_next (gsi);
4276 unlink_stmt_vdef (stmt);
4277 gsi_remove (&sad.old_gsi, true);
4278 release_defs (stmt);
4279 sra_stats.deleted++;
4280 return SRA_AM_REMOVED;
4281 }
4282 }
4283 else
4284 {
4285 if (access_has_children_p (racc)
4286 && !racc->grp_unscalarized_data
4287 && TREE_CODE (lhs) != SSA_NAME)
4288 {
4289 if (dump_file)
4290 {
4291 fprintf (dump_file, "Removing load: ");
4292 print_gimple_stmt (dump_file, stmt, 0);
4293 }
4294 generate_subtree_copies (racc->first_child, lhs,
4295 racc->offset, 0, 0, gsi,
4296 false, false, loc);
4297 gcc_assert (stmt == gsi_stmt (*gsi));
4298 unlink_stmt_vdef (stmt);
4299 gsi_remove (gsi, true);
4300 release_defs (stmt);
4301 sra_stats.deleted++;
4302 return SRA_AM_REMOVED;
4303 }
4304 /* Restore the aggregate RHS from its components so the
4305 prevailing aggregate copy does the right thing. */
4306 if (access_has_children_p (racc))
4307 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4308 gsi, false, false, loc);
4309 /* Re-load the components of the aggregate copy destination.
4310 But use the RHS aggregate to load from to expose more
4311 optimization opportunities. */
4312 if (access_has_children_p (lacc))
4313 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
4314 0, 0, gsi, true, true, loc);
4315 }
4316
4317 return SRA_AM_NONE;
4318 }
4319 }
4320
4321 /* Set any scalar replacements of values in the constant pool to the initial
4322 value of the constant. (Constant-pool decls like *.LC0 have effectively
4323 been initialized before the program starts, we must do the same for their
4324 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4325 the function's entry block. */
4326
4327 static void
4328 initialize_constant_pool_replacements (void)
4329 {
4330 gimple_seq seq = NULL;
4331 gimple_stmt_iterator gsi = gsi_start (seq);
4332 bitmap_iterator bi;
4333 unsigned i;
4334
4335 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
4336 {
4337 tree var = candidate (i);
4338 if (!constant_decl_p (var))
4339 continue;
4340
4341 struct access *access = get_first_repr_for_decl (var);
4342
4343 while (access)
4344 {
4345 if (access->replacement_decl)
4346 {
4347 gassign *stmt
4348 = gimple_build_assign (get_access_replacement (access),
4349 unshare_expr (access->expr));
4350 if (dump_file && (dump_flags & TDF_DETAILS))
4351 {
4352 fprintf (dump_file, "Generating constant initializer: ");
4353 print_gimple_stmt (dump_file, stmt, 0);
4354 fprintf (dump_file, "\n");
4355 }
4356 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4357 update_stmt (stmt);
4358 }
4359
4360 if (access->first_child)
4361 access = access->first_child;
4362 else if (access->next_sibling)
4363 access = access->next_sibling;
4364 else
4365 {
4366 while (access->parent && !access->next_sibling)
4367 access = access->parent;
4368 if (access->next_sibling)
4369 access = access->next_sibling;
4370 else
4371 access = access->next_grp;
4372 }
4373 }
4374 }
4375
4376 seq = gsi_seq (gsi);
4377 if (seq)
4378 gsi_insert_seq_on_edge_immediate (
4379 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4380 }
4381
4382 /* Traverse the function body and carry out all modifications as decided in
4383 analyze_all_variable_accesses. Return true iff the CFG has been
4384 changed. */
4385
4386 static bool
4387 sra_modify_function_body (void)
4388 {
4389 bool cfg_changed = false;
4390 basic_block bb;
4391
4392 initialize_constant_pool_replacements ();
4393
4394 FOR_EACH_BB_FN (bb, cfun)
4395 {
4396 gimple_stmt_iterator gsi = gsi_start_bb (bb);
4397 while (!gsi_end_p (gsi))
4398 {
4399 gimple *stmt = gsi_stmt (gsi);
4400 enum assignment_mod_result assign_result;
4401 bool modified = false, deleted = false;
4402 tree *t;
4403 unsigned i;
4404
4405 switch (gimple_code (stmt))
4406 {
4407 case GIMPLE_RETURN:
4408 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4409 if (*t != NULL_TREE)
4410 modified |= sra_modify_expr (t, &gsi, false);
4411 break;
4412
4413 case GIMPLE_ASSIGN:
4414 assign_result = sra_modify_assign (stmt, &gsi);
4415 modified |= assign_result == SRA_AM_MODIFIED;
4416 deleted = assign_result == SRA_AM_REMOVED;
4417 break;
4418
4419 case GIMPLE_CALL:
4420 /* Operands must be processed before the lhs. */
4421 for (i = 0; i < gimple_call_num_args (stmt); i++)
4422 {
4423 t = gimple_call_arg_ptr (stmt, i);
4424 modified |= sra_modify_expr (t, &gsi, false);
4425 }
4426
4427 if (gimple_call_lhs (stmt))
4428 {
4429 t = gimple_call_lhs_ptr (stmt);
4430 modified |= sra_modify_expr (t, &gsi, true);
4431 }
4432 break;
4433
4434 case GIMPLE_ASM:
4435 {
4436 gasm *asm_stmt = as_a <gasm *> (stmt);
4437 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4438 {
4439 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4440 modified |= sra_modify_expr (t, &gsi, false);
4441 }
4442 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4443 {
4444 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4445 modified |= sra_modify_expr (t, &gsi, true);
4446 }
4447 }
4448 break;
4449
4450 default:
4451 break;
4452 }
4453
4454 if (modified)
4455 {
4456 update_stmt (stmt);
4457 if (maybe_clean_eh_stmt (stmt)
4458 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4459 cfg_changed = true;
4460 }
4461 if (!deleted)
4462 gsi_next (&gsi);
4463 }
4464 }
4465
4466 gsi_commit_edge_inserts ();
4467 return cfg_changed;
4468 }
4469
4470 /* Generate statements initializing scalar replacements of parts of function
4471 parameters. */
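/* Sketch of the generated code (names are illustrative): for a function
   taking a scalarized aggregate parameter

     int f (struct pair p) { return p.a + p.b; }

   statements of the form

     p$a = p.a;
     p$b = p.b;

   are emitted on the edge out of the entry block so that the replacements
   start out holding the incoming parameter values.  */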
4472
4473 static void
4474 initialize_parameter_reductions (void)
4475 {
4476 gimple_stmt_iterator gsi;
4477 gimple_seq seq = NULL;
4478 tree parm;
4479
4480 gsi = gsi_start (seq);
4481 for (parm = DECL_ARGUMENTS (current_function_decl);
4482 parm;
4483 parm = DECL_CHAIN (parm))
4484 {
4485 vec<access_p> *access_vec;
4486 struct access *access;
4487
4488 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4489 continue;
4490 access_vec = get_base_access_vector (parm);
4491 if (!access_vec)
4492 continue;
4493
4494 for (access = (*access_vec)[0];
4495 access;
4496 access = access->next_grp)
4497 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
4498 EXPR_LOCATION (parm));
4499 }
4500
4501 seq = gsi_seq (gsi);
4502 if (seq)
4503 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4504 }
4505
4506 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
4507 it reveals there are components of some aggregates to be scalarized, it runs
4508 the required transformations. */
4509 static unsigned int
4510 perform_intra_sra (void)
4511 {
4512 int ret = 0;
4513 sra_initialize ();
4514
4515 if (!find_var_candidates ())
4516 goto out;
4517
4518 if (!scan_function ())
4519 goto out;
4520
4521 if (!analyze_all_variable_accesses ())
4522 goto out;
4523
4524 if (sra_modify_function_body ())
4525 ret = TODO_update_ssa | TODO_cleanup_cfg;
4526 else
4527 ret = TODO_update_ssa;
4528 initialize_parameter_reductions ();
4529
4530 statistics_counter_event (cfun, "Scalar replacements created",
4531 sra_stats.replacements);
4532 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
4533 statistics_counter_event (cfun, "Subtree copy stmts",
4534 sra_stats.subtree_copies);
4535 statistics_counter_event (cfun, "Subreplacement stmts",
4536 sra_stats.subreplacements);
4537 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
4538 statistics_counter_event (cfun, "Separate LHS and RHS handling",
4539 sra_stats.separate_lhs_rhs_handling);
4540
4541 out:
4542 sra_deinitialize ();
4543 return ret;
4544 }
4545
4546 /* Perform early intraprocedural SRA. */
4547 static unsigned int
4548 early_intra_sra (void)
4549 {
4550 sra_mode = SRA_MODE_EARLY_INTRA;
4551 return perform_intra_sra ();
4552 }
4553
4554 /* Perform "late" intraprocedural SRA. */
4555 static unsigned int
4556 late_intra_sra (void)
4557 {
4558 sra_mode = SRA_MODE_INTRA;
4559 return perform_intra_sra ();
4560 }
4561
4562
4563 static bool
4564 gate_intra_sra (void)
4565 {
4566 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
4567 }
4568
4569
4570 namespace {
4571
4572 const pass_data pass_data_sra_early =
4573 {
4574 GIMPLE_PASS, /* type */
4575 "esra", /* name */
4576 OPTGROUP_NONE, /* optinfo_flags */
4577 TV_TREE_SRA, /* tv_id */
4578 ( PROP_cfg | PROP_ssa ), /* properties_required */
4579 0, /* properties_provided */
4580 0, /* properties_destroyed */
4581 0, /* todo_flags_start */
4582 TODO_update_ssa, /* todo_flags_finish */
4583 };
4584
4585 class pass_sra_early : public gimple_opt_pass
4586 {
4587 public:
4588 pass_sra_early (gcc::context *ctxt)
4589 : gimple_opt_pass (pass_data_sra_early, ctxt)
4590 {}
4591
4592 /* opt_pass methods: */
4593 virtual bool gate (function *) { return gate_intra_sra (); }
4594 virtual unsigned int execute (function *) { return early_intra_sra (); }
4595
4596 }; // class pass_sra_early
4597
4598 } // anon namespace
4599
4600 gimple_opt_pass *
4601 make_pass_sra_early (gcc::context *ctxt)
4602 {
4603 return new pass_sra_early (ctxt);
4604 }
4605
4606 namespace {
4607
4608 const pass_data pass_data_sra =
4609 {
4610 GIMPLE_PASS, /* type */
4611 "sra", /* name */
4612 OPTGROUP_NONE, /* optinfo_flags */
4613 TV_TREE_SRA, /* tv_id */
4614 ( PROP_cfg | PROP_ssa ), /* properties_required */
4615 0, /* properties_provided */
4616 0, /* properties_destroyed */
4617 TODO_update_address_taken, /* todo_flags_start */
4618 TODO_update_ssa, /* todo_flags_finish */
4619 };
4620
4621 class pass_sra : public gimple_opt_pass
4622 {
4623 public:
4624 pass_sra (gcc::context *ctxt)
4625 : gimple_opt_pass (pass_data_sra, ctxt)
4626 {}
4627
4628 /* opt_pass methods: */
4629 virtual bool gate (function *) { return gate_intra_sra (); }
4630 virtual unsigned int execute (function *) { return late_intra_sra (); }
4631
4632 }; // class pass_sra
4633
4634 } // anon namespace
4635
4636 gimple_opt_pass *
4637 make_pass_sra (gcc::context *ctxt)
4638 {
4639 return new pass_sra (ctxt);
4640 }