/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2018 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the
      access and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according
      to their offset and size and then scanned for partially overlapping
      accesses (i.e. those which overlap but one is not entirely within
      another).  Such an access disqualifies the whole aggregate from being
      scalarized.

      If there is no such inhibiting overlap, a representative access
      structure is chosen for every unique combination of offset and size.
      Afterwards, the pass builds a set of trees from these structures, in
      which children of an access are within their parent (in terms of offset
      and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links
      from the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
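/* An illustrative sketch (not a testcase from the GCC tree): given

     struct pair { int x; int y; };

     int
     sum (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   intraprocedural SRA replaces the accesses to p with two independent
   scalars, conceptually

     int p$x, p$y;
     p$x = 1;
     p$y = 2;
     return p$x + p$y;

   leaving p itself dead.  The "$" naming scheme is the one produced by
   make_fancy_name below.  */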

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "symbol-summary.h"
#include "ipa-param-manipulation.h"
#include "ipa-prop.h"
#include "params.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "ipa-utils.h"
#include "builtins.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
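/* A hypothetical illustration (assuming 32-bit int): for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; } x;

   with accesses x.in, x.in.b and x.c, the representatives form

     x.in     offset 0,  size 64
       x.in.b offset 32, size 32   (first_child of x.in)
     x.c      offset 64, size 32   (root of its own subtree)

   since x.c does not lie within the offset+size extent of x.in.  */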

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component
     reference.  If EXPR isn't a component reference just set `BASE = EXPR',
     `OFFSET = 0', `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* After the access tree has been constructed, this points to the parent of
     the current access, if there is one.  NULL for roots.  */
  struct access *parent;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when the grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access made in reverse storage order?  */
  unsigned reverse : 1;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not
     be scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.
   They are used to propagate subaccesses from rhs to lhs as long as they
   don't conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID, return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early IPA SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort, etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Statistics gathered during the pass.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed
     an expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS
     and RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of
     their components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type);
  fprintf (f, ", non_addressable = %d, reverse = %d",
	   access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root
   in ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses
   rooted in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked
   list in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (access->first_link && !access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}


/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not
   allow scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
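/* For instance (illustrative types only), given

     struct s1 { int i; float f; };
     struct s2 { volatile int v; };
     typedef volatile int vi4[4];

   the function above returns false for struct s1 but true for struct s2
   ("volatile structure field") and for vi4 ("element type is volatile").  */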

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

static bool maybe_add_sra_candidate (tree);

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  Also scan for uses of the constant pool as we go along and
   add to candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  poly_int64 poffset, psize, pmax_size;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
				  &reverse);
  if (!poffset.is_constant (&offset)
      || !psize.is_constant (&size)
      || !pmax_size.is_constant (&max_size))
    {
      disqualify_candidate (base, "Encountered a polynomial-sized access.");
      return NULL;
    }

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
	  && !is_gimple_reg_type (TREE_TYPE (expr))
	  && dump_file && (dump_flags & TDF_DETAILS))
	{
	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
	     and elements of multidimensional arrays (which are
	     multi-element arrays in their own right).  */
	  fprintf (dump_file, "Allowing non-reg-type load of part"
		   " of constant-pool entry: ");
	  print_generic_expr (dump_file, expr);
	}
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}


/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true
   if we are considering a decl from the constant pool.  If it is false, char
   arrays will be refused.  */

static bool
scalarizable_type_p (tree type, bool const_decl)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (DECL_BIT_FIELD (fld))
	      return false;

	    if (!is_gimple_reg_type (ft)
		&& !scalarizable_type_p (ft, const_decl))
	      return false;
	  }

      return true;

    case ARRAY_TYPE:
      {
	HOST_WIDE_INT min_elem_size;
	if (const_decl)
	  min_elem_size = 0;
	else
	  min_elem_size = BITS_PER_UNIT;

	if (TYPE_DOMAIN (type) == NULL_TREE
	    || !tree_fits_shwi_p (TYPE_SIZE (type))
	    || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
	    || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
	    || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
	  return false;
	if (tree_to_shwi (TYPE_SIZE (type)) == 0
	    && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
	  /* Zero-element array, should not prevent scalarization.  */
	  ;
	else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
		 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	  /* Variable-length array, do not allow scalarization.  */
	  return false;

	tree elem = TREE_TYPE (type);
	if (!is_gimple_reg_type (elem)
	    && !scalarizable_type_p (elem, const_decl))
	  return false;
	return true;
      }
    default:
      return false;
    }
}
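/* Illustrative examples (hypothetical types): with CONST_DECL false,

     struct { int i; double d; }   is scalarizable,
     struct { int bf : 3; }        is not (bit-field),
     int [4]                       is scalarizable,
     char [16]                     is not, because its element size does not
                                   exceed BITS_PER_UNIT; with CONST_DECL true
                                   (a constant-pool decl) it is accepted.  */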

static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree,
			    tree);

/* Create total_scalarization accesses for all scalar fields of a member of
   type DECL_TYPE conforming to scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, OFFSET locates
   the member and REF must be the memory reference expression for the
   member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
	    tree ft = TREE_TYPE (fld);
	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
			    TYPE_REVERSE_STORAGE_ORDER (decl_type),
			    nref, ft);
	  }
      break;
    case ARRAY_TYPE:
      {
	tree elemtype = TREE_TYPE (decl_type);
	tree elem_size = TYPE_SIZE (elemtype);
	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
	gcc_assert (el_size > 0);

	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
	/* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
	if (maxidx)
	  {
	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
	    tree domain = TYPE_DOMAIN (decl_type);
	    /* MINIDX and MAXIDX are inclusive, and must be interpreted in
	       DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
	    offset_int idx = wi::to_offset (minidx);
	    offset_int max = wi::to_offset (maxidx);
	    if (!TYPE_UNSIGNED (domain))
	      {
		idx = wi::sext (idx, TYPE_PRECISION (domain));
		max = wi::sext (max, TYPE_PRECISION (domain));
	      }
	    for (int el_off = offset; idx <= max; ++idx)
	      {
		tree nref = build4 (ARRAY_REF, elemtype,
				    ref,
				    wide_int_to_tree (domain, idx),
				    NULL_TREE, NULL_TREE);
		scalarize_elem (base, el_off, el_size,
				TYPE_REVERSE_STORAGE_ORDER (decl_type),
				nref, elemtype);
		el_off += el_size;
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }
}

/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE
   locate the member, REVERSE gives its storage order, and REF must be the
   reference expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
		tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      access->reverse = reverse;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}

/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}
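/* As a sketch (hypothetical variable, assuming 32-bit int), total
   scalarization of

     struct { int a; int b; } v;

   creates the whole-variable access built by
   create_total_scalarization_access (offset 0, size 64) and, via
   completely_scalarize, one access per scalar leaf: v.a (offset 0, size 32)
   and v.b (offset 32, size 32), all with grp_total_scalarization set.  */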
6de9cd9a 1137
cc524fc7
AM
1138/* Return true if REF has an VIEW_CONVERT_EXPR somewhere in it. */
1139
1140static inline bool
1141contains_view_convert_expr_p (const_tree ref)
1142{
1143 while (handled_component_p (ref))
1144 {
1145 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1146 return true;
1147 ref = TREE_OPERAND (ref, 0);
1148 }
1149
1150 return false;
1151}
1152
d90ffcfb
MJ
1153/* Return true if REF contains a VIEW_CONVERT_EXPR or a MEM_REF that performs
1154 type conversion or a COMPONENT_REF with a bit-field field declaration. */
1155
1156static bool
1157contains_vce_or_bfcref_p (const_tree ref)
1158{
1159 while (handled_component_p (ref))
1160 {
1161 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1162 || (TREE_CODE (ref) == COMPONENT_REF
1163 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1164 return true;
1165 ref = TREE_OPERAND (ref, 0);
1166 }
1167
1168 if (TREE_CODE (ref) != MEM_REF
1169 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1170 return false;
1171
1172 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1173 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1174 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1175 return true;
1176
1177 return false;
1178}
1179
0674b9d0
MJ
1180/* Search the given tree for a declaration by skipping handled components and
1181 exclude it from the candidates. */
1182
1183static void
1184disqualify_base_of_expr (tree t, const char *reason)
6de9cd9a 1185{
70f34814
RG
1186 t = get_base_address (t);
1187 if (sra_mode == SRA_MODE_EARLY_IPA
1188 && TREE_CODE (t) == MEM_REF)
1189 t = get_ssa_base_param (TREE_OPERAND (t, 0));
07ffa034
MJ
1190
1191 if (t && DECL_P (t))
0674b9d0 1192 disqualify_candidate (t, reason);
97e73bd2 1193}
19114537 1194
0674b9d0
MJ
1195/* Scan expression EXPR and create access structures for all accesses to
1196 candidates for scalarization. Return the created access or NULL if none is
1197 created. */
6de9cd9a 1198
0674b9d0 1199static struct access *
355fe088 1200build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
97e73bd2 1201{
0674b9d0 1202 struct access *ret = NULL;
0674b9d0 1203 bool partial_ref;
6de9cd9a 1204
0674b9d0
MJ
1205 if (TREE_CODE (expr) == BIT_FIELD_REF
1206 || TREE_CODE (expr) == IMAGPART_EXPR
1207 || TREE_CODE (expr) == REALPART_EXPR)
1208 {
1209 expr = TREE_OPERAND (expr, 0);
1210 partial_ref = true;
1211 }
1212 else
1213 partial_ref = false;
6de9cd9a 1214
89e18b1a
EB
1215 if (storage_order_barrier_p (expr))
1216 {
1217 disqualify_base_of_expr (expr, "storage order barrier.");
1218 return NULL;
1219 }
1220
0674b9d0
MJ
1221 /* We need to dive through V_C_Es in order to get the size of its parameter
1222 and not the result type. Ada produces such statements. We are also
1223 capable of handling the topmost V_C_E but not any of those buried in other
1224 handled components. */
89e18b1a 1225 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
0674b9d0
MJ
1226 expr = TREE_OPERAND (expr, 0);
1227
1228 if (contains_view_convert_expr_p (expr))
1229 {
1230 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1231 "component.");
1232 return NULL;
1233 }
252c1a46
MJ
1234 if (TREE_THIS_VOLATILE (expr))
1235 {
1236 disqualify_base_of_expr (expr, "part of a volatile reference.");
1237 return NULL;
1238 }
fa27426e 1239
0674b9d0 1240 switch (TREE_CODE (expr))
fa27426e 1241 {
70f34814
RG
1242 case MEM_REF:
1243 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1244 && sra_mode != SRA_MODE_EARLY_IPA)
07ffa034
MJ
1245 return NULL;
1246 /* fall through */
fa27426e
RH
1247 case VAR_DECL:
1248 case PARM_DECL:
1249 case RESULT_DECL:
0674b9d0
MJ
1250 case COMPONENT_REF:
1251 case ARRAY_REF:
1252 case ARRAY_RANGE_REF:
07ffa034 1253 ret = create_access (expr, stmt, write);
0674b9d0 1254 break;
fa27426e 1255
0674b9d0
MJ
1256 default:
1257 break;
1258 }
fa27426e 1259
0674b9d0
MJ
1260 if (write && partial_ref && ret)
1261 ret->grp_partial_lhs = 1;
11fc4275 1262
0674b9d0
MJ
1263 return ret;
1264}
fa27426e 1265
6cbd3b6a
MJ
1266/* Scan expression EXPR and create access structures for all accesses to
1267 candidates for scalarization. Return true if any access has been inserted.
1268 STMT must be the statement from which the expression is taken, WRITE must be
1269 true if the expression is a store and false otherwise. */
510335c8 1270
0674b9d0 1271static bool
355fe088 1272build_access_from_expr (tree expr, gimple *stmt, bool write)
0674b9d0 1273{
7744b697
MJ
1274 struct access *access;
1275
6cbd3b6a 1276 access = build_access_from_expr_1 (expr, stmt, write);
7744b697
MJ
1277 if (access)
1278 {
1279 /* This means the aggregate is accesses as a whole in a way other than an
1280 assign statement and thus cannot be removed even if we had a scalar
1281 replacement for everything. */
1282 if (cannot_scalarize_away_bitmap)
1283 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1284 return true;
1285 }
1286 return false;
6de9cd9a
DN
1287}
1288
104cb50b
MJ
1289/* Return the single non-EH successor edge of BB or NULL if there is none or
1290 more than one. */
1291
1292static edge
1293single_non_eh_succ (basic_block bb)
1294{
1295 edge e, res = NULL;
1296 edge_iterator ei;
1297
1298 FOR_EACH_EDGE (e, ei, bb->succs)
1299 if (!(e->flags & EDGE_EH))
1300 {
1301 if (res)
1302 return NULL;
1303 res = e;
1304 }
1305
1306 return res;
1307}
1308
1309/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1310 there is no alternative spot where to put statements SRA might need to
1311 generate after it. The spot we are looking for is an edge leading to a
1312 single non-EH successor, if it exists and is indeed single. RHS may be
1313 NULL, in that case ignore it. */
1314
0674b9d0 1315static bool
355fe088 1316disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
6de9cd9a 1317{
07ffa034 1318 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
104cb50b 1319 && stmt_ends_bb_p (stmt))
0674b9d0 1320 {
104cb50b
MJ
1321 if (single_non_eh_succ (gimple_bb (stmt)))
1322 return false;
1323
0674b9d0
MJ
1324 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1325 if (rhs)
1326 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1327 return true;
1328 }
1329 return false;
1330}
6de9cd9a 1331
8eef6097
MJ
1332/* Return true if the nature of BASE is such that it contains data even if
1333 there is no write to it in the function. */
1334
1335static bool
1336comes_initialized_p (tree base)
1337{
1338 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1339}
1340
073a8998 1341/* Scan expressions occurring in STMT, create access structures for all accesses
6cbd3b6a 1342 to candidates for scalarization and remove those candidates which occur in
0674b9d0
MJ
1343 statements or expressions that prevent them from being split apart. Return
1344 true if any access has been inserted. */
97e73bd2 1345
6cbd3b6a 1346static bool
355fe088 1347build_accesses_from_assign (gimple *stmt)
0674b9d0 1348{
6cbd3b6a 1349 tree lhs, rhs;
0674b9d0 1350 struct access *lacc, *racc;
6de9cd9a 1351
47598145
MM
1352 if (!gimple_assign_single_p (stmt)
1353 /* Scope clobbers don't influence scalarization. */
1354 || gimple_clobber_p (stmt))
6cbd3b6a 1355 return false;
6de9cd9a 1356
6cbd3b6a
MJ
1357 lhs = gimple_assign_lhs (stmt);
1358 rhs = gimple_assign_rhs1 (stmt);
6de9cd9a 1359
104cb50b 1360 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
6cbd3b6a 1361 return false;
97e73bd2 1362
6cbd3b6a
MJ
1363 racc = build_access_from_expr_1 (rhs, stmt, false);
1364 lacc = build_access_from_expr_1 (lhs, stmt, true);
97e73bd2 1365
fc37536b 1366 if (lacc)
ee45a32d
EB
1367 {
1368 lacc->grp_assignment_write = 1;
1369 if (storage_order_barrier_p (rhs))
1370 lacc->grp_unscalarizable_region = 1;
1371 }
fc37536b 1372
77620011
MJ
1373 if (racc)
1374 {
1375 racc->grp_assignment_read = 1;
1376 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1377 && !is_gimple_reg_type (racc->type))
d90ffcfb
MJ
1378 {
1379 if (contains_vce_or_bfcref_p (rhs))
1380 bitmap_set_bit (cannot_scalarize_away_bitmap,
1381 DECL_UID (racc->base));
1382 else
1383 bitmap_set_bit (should_scalarize_away_bitmap,
1384 DECL_UID (racc->base));
1385 }
ee45a32d
EB
1386 if (storage_order_barrier_p (lhs))
1387 racc->grp_unscalarizable_region = 1;
77620011 1388 }
7744b697 1389
0674b9d0 1390 if (lacc && racc
07ffa034 1391 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
0674b9d0
MJ
1392 && !lacc->grp_unscalarizable_region
1393 && !racc->grp_unscalarizable_region
6cbd3b6a 1394 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
0674b9d0
MJ
1395 && lacc->size == racc->size
1396 && useless_type_conversion_p (lacc->type, racc->type))
97e73bd2 1397 {
0674b9d0 1398 struct assign_link *link;
11fc4275 1399
fb0b2914 1400 link = assign_link_pool.allocate ();
0674b9d0 1401 memset (link, 0, sizeof (struct assign_link));
97e73bd2 1402
0674b9d0
MJ
1403 link->lacc = lacc;
1404 link->racc = racc;
0674b9d0 1405 add_link_to_rhs (racc, link);
e9982c6a
MJ
1406 add_access_to_work_queue (racc);
1407
2bba7541 1408 /* Let's delay marking the areas as written until propagation of accesses
8eef6097
MJ
1409 across link, unless the nature of rhs tells us that its data comes
1410 from elsewhere. */
1411 if (!comes_initialized_p (racc->base))
1412 lacc->write = false;
97e73bd2
RH
1413 }
1414
6cbd3b6a 1415 return lacc || racc;
97e73bd2
RH
1416}
1417
0674b9d0
MJ
1418/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1419 GIMPLE_ASM operands with memory constrains which cannot be scalarized. */
97e73bd2 1420
0674b9d0 1421static bool
355fe088 1422asm_visit_addr (gimple *, tree op, tree, void *)
97e73bd2 1423{
2ea9dc64
RG
1424 op = get_base_address (op);
1425 if (op
1426 && DECL_P (op))
0674b9d0 1427 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
97e73bd2 1428
0674b9d0 1429 return false;
97e73bd2
RH
1430}
1431
2f3cdcf5 1432/* Return true iff callsite CALL has at least as many actual arguments as there
c18ff8a4
MJ
1433 are formal parameters of the function currently processed by IPA-SRA and
1434 that their types match. */
2f3cdcf5
MJ
1435
1436static inline bool
355fe088 1437callsite_arguments_match_p (gimple *call)
2f3cdcf5 1438{
c18ff8a4
MJ
1439 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1440 return false;
1441
1442 tree parm;
1443 int i;
1444 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1445 parm;
1446 parm = DECL_CHAIN (parm), i++)
1447 {
1448 tree arg = gimple_call_arg (call, i);
1449 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1450 return false;
1451 }
1452 return true;
2f3cdcf5 1453}
97e73bd2 1454
6cbd3b6a
MJ
1455/* Scan function and look for interesting expressions and create access
1456 structures for them. Return true iff any access is created. */
d4d3aad9 1457
0674b9d0 1458static bool
6cbd3b6a 1459scan_function (void)
97e73bd2
RH
1460{
1461 basic_block bb;
0674b9d0 1462 bool ret = false;
97e73bd2 1463
11cd3bed 1464 FOR_EACH_BB_FN (bb, cfun)
97e73bd2 1465 {
6cbd3b6a
MJ
1466 gimple_stmt_iterator gsi;
1467 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
97e73bd2 1468 {
355fe088 1469 gimple *stmt = gsi_stmt (gsi);
6cbd3b6a
MJ
1470 tree t;
1471 unsigned i;
7ec49257 1472
6cbd3b6a 1473 if (final_bbs && stmt_can_throw_external (stmt))
07ffa034 1474 bitmap_set_bit (final_bbs, bb->index);
0674b9d0 1475 switch (gimple_code (stmt))
510335c8 1476 {
0674b9d0 1477 case GIMPLE_RETURN:
538dd0b7 1478 t = gimple_return_retval (as_a <greturn *> (stmt));
6cbd3b6a
MJ
1479 if (t != NULL_TREE)
1480 ret |= build_access_from_expr (t, stmt, false);
1481 if (final_bbs)
07ffa034 1482 bitmap_set_bit (final_bbs, bb->index);
0674b9d0 1483 break;
510335c8 1484
0674b9d0 1485 case GIMPLE_ASSIGN:
6cbd3b6a 1486 ret |= build_accesses_from_assign (stmt);
0674b9d0 1487 break;
510335c8 1488
0674b9d0 1489 case GIMPLE_CALL:
0674b9d0 1490 for (i = 0; i < gimple_call_num_args (stmt); i++)
6cbd3b6a
MJ
1491 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1492 stmt, false);
510335c8 1493
6cbd3b6a 1494 if (sra_mode == SRA_MODE_EARLY_IPA)
07ffa034
MJ
1495 {
1496 tree dest = gimple_call_fndecl (stmt);
1497 int flags = gimple_call_flags (stmt);
1498
2f3cdcf5
MJ
1499 if (dest)
1500 {
1501 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1502 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1503 encountered_apply_args = true;
9e401b63 1504 if (recursive_call_p (current_function_decl, dest))
2f3cdcf5
MJ
1505 {
1506 encountered_recursive_call = true;
c18ff8a4 1507 if (!callsite_arguments_match_p (stmt))
2f3cdcf5
MJ
1508 encountered_unchangable_recursive_call = true;
1509 }
1510 }
07ffa034
MJ
1511
1512 if (final_bbs
1513 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1514 bitmap_set_bit (final_bbs, bb->index);
1515 }
1516
6cbd3b6a 1517 t = gimple_call_lhs (stmt);
104cb50b 1518 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
6cbd3b6a 1519 ret |= build_access_from_expr (t, stmt, true);
0674b9d0 1520 break;
510335c8 1521
0674b9d0 1522 case GIMPLE_ASM:
538dd0b7
DM
1523 {
1524 gasm *asm_stmt = as_a <gasm *> (stmt);
1525 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1526 asm_visit_addr);
1527 if (final_bbs)
1528 bitmap_set_bit (final_bbs, bb->index);
6cbd3b6a 1529
538dd0b7
DM
1530 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1531 {
1532 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1533 ret |= build_access_from_expr (t, asm_stmt, false);
1534 }
1535 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1536 {
1537 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1538 ret |= build_access_from_expr (t, asm_stmt, true);
1539 }
1540 }
07ffa034 1541 break;
97e73bd2 1542
0674b9d0
MJ
1543 default:
1544 break;
1545 }
97e73bd2 1546 }
87c476a2 1547 }
97e73bd2 1548
0674b9d0 1549 return ret;
97e73bd2
RH
1550}
1551
0674b9d0
MJ
1552/* Helper of QSORT function. There are pointers to accesses in the array. An
1553 access is considered smaller than another if it has smaller offset or if the
1554 offsets are the same but is size is bigger. */
97e73bd2 1555
0674b9d0
MJ
1556static int
1557compare_access_positions (const void *a, const void *b)
1558{
1559 const access_p *fp1 = (const access_p *) a;
1560 const access_p *fp2 = (const access_p *) b;
1561 const access_p f1 = *fp1;
1562 const access_p f2 = *fp2;
1563
1564 if (f1->offset != f2->offset)
1565 return f1->offset < f2->offset ? -1 : 1;
1566
1567 if (f1->size == f2->size)
1568 {
d05fe940
MJ
1569 if (f1->type == f2->type)
1570 return 0;
0674b9d0 1571 /* Put any non-aggregate type before any aggregate type. */
d05fe940 1572 else if (!is_gimple_reg_type (f1->type)
9fda11a2 1573 && is_gimple_reg_type (f2->type))
0674b9d0
MJ
1574 return 1;
1575 else if (is_gimple_reg_type (f1->type)
1576 && !is_gimple_reg_type (f2->type))
1577 return -1;
9fda11a2
MJ
1578 /* Put any complex or vector type before any other scalar type. */
1579 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1580 && TREE_CODE (f1->type) != VECTOR_TYPE
1581 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1582 || TREE_CODE (f2->type) == VECTOR_TYPE))
1583 return 1;
1584 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1585 || TREE_CODE (f1->type) == VECTOR_TYPE)
1586 && TREE_CODE (f2->type) != COMPLEX_TYPE
1587 && TREE_CODE (f2->type) != VECTOR_TYPE)
1588 return -1;
973c150c
MJ
1589 /* Put any integral type before any non-integral type. When splicing, we
1590 make sure that those with insufficient precision and occupying the
1591 same space are not scalarized. */
0674b9d0 1592 else if (INTEGRAL_TYPE_P (f1->type)
973c150c
MJ
1593 && !INTEGRAL_TYPE_P (f2->type))
1594 return -1;
1595 else if (!INTEGRAL_TYPE_P (f1->type)
9fda11a2 1596 && INTEGRAL_TYPE_P (f2->type))
0674b9d0 1597 return 1;
973c150c
MJ
1598 /* Put the integral type with the bigger precision first. */
1599 else if (INTEGRAL_TYPE_P (f1->type)
1600 && INTEGRAL_TYPE_P (f2->type)
1601 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1602 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
0674b9d0
MJ
1603 /* Stabilize the sort. */
1604 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1605 }
1606
1607 /* We want the bigger accesses first, thus the opposite operator in the next
1608 line: */
1609 return f1->size > f2->size ? -1 : 1;
1610}
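/* As an illustration of the ordering above (a hypothetical example, not
   taken from any testcase): for a variable

     struct S { int i; float f; } s;

   accesses <offset 0, size 64, struct S>, <offset 0, size 32, int>,
   <offset 0, size 32, float> and <offset 32, size 32, float> end up sorted
   in exactly this order: the bigger access comes first, and at equal offset
   and size scalars precede aggregates and integral types precede
   non-integral ones. */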
1611
1612
1613/* Append the name of declaration DECL to the name obstack. A helper function for
1614 make_fancy_name. */
0bca51f0
DN
1615
1616static void
0674b9d0 1617make_fancy_decl_name (tree decl)
0bca51f0 1618{
0674b9d0 1619 char buffer[32];
6de9cd9a 1620
0674b9d0
MJ
1621 tree name = DECL_NAME (decl);
1622 if (name)
1623 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1624 IDENTIFIER_LENGTH (name));
1625 else
1626 {
1627 sprintf (buffer, "D%u", DECL_UID (decl));
1628 obstack_grow (&name_obstack, buffer, strlen (buffer));
1629 }
726a989a 1630}
38635499 1631
0674b9d0 1632/* Helper for make_fancy_name. */
d116ffa6
RH
1633
1634static void
0674b9d0 1635make_fancy_name_1 (tree expr)
d116ffa6 1636{
0674b9d0
MJ
1637 char buffer[32];
1638 tree index;
1639
1640 if (DECL_P (expr))
d116ffa6 1641 {
0674b9d0
MJ
1642 make_fancy_decl_name (expr);
1643 return;
d116ffa6 1644 }
6de9cd9a 1645
0674b9d0 1646 switch (TREE_CODE (expr))
6de9cd9a 1647 {
0674b9d0
MJ
1648 case COMPONENT_REF:
1649 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1650 obstack_1grow (&name_obstack, '$');
1651 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1652 break;
6de9cd9a 1653
0674b9d0
MJ
1654 case ARRAY_REF:
1655 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1656 obstack_1grow (&name_obstack, '$');
1657 /* Arrays with only one element may not have a constant as their
1658 index. */
1659 index = TREE_OPERAND (expr, 1);
1660 if (TREE_CODE (index) != INTEGER_CST)
1661 break;
1662 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1663 obstack_grow (&name_obstack, buffer, strlen (buffer));
70f34814 1664 break;
6de9cd9a 1665
70f34814
RG
1666 case ADDR_EXPR:
1667 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1668 break;
1669
1670 case MEM_REF:
1671 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1672 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1673 {
1674 obstack_1grow (&name_obstack, '$');
1675 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1676 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1677 obstack_grow (&name_obstack, buffer, strlen (buffer));
1678 }
0674b9d0 1679 break;
6de9cd9a 1680
0674b9d0
MJ
1681 case BIT_FIELD_REF:
1682 case REALPART_EXPR:
1683 case IMAGPART_EXPR:
1684 gcc_unreachable (); /* We treat these as scalars. */
1685 break;
97e73bd2 1686 default:
0674b9d0 1687 break;
97e73bd2 1688 }
6de9cd9a
DN
1689}
1690
0674b9d0 1691/* Create a human readable name for a replacement variable based on EXPR. */
6de9cd9a 1692
0674b9d0
MJ
1693static char *
1694make_fancy_name (tree expr)
97e73bd2 1695{
0674b9d0
MJ
1696 make_fancy_name_1 (expr);
1697 obstack_1grow (&name_obstack, '\0');
1698 return XOBFINISH (&name_obstack, char *);
97e73bd2
RH
1699}
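/* For example, assuming a hypothetical declaration

     struct { struct { int len; } hdr; int data[4]; } packet;

   the expression packet.hdr.len yields the name "packet$hdr$len" and
   packet.data[2] yields "packet$data$2"; these strings become the names of
   the corresponding replacement variables in dumps and debug info. */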
1700
d242d063 1701/* Construct a MEM_REF that would reference a part of aggregate BASE of type
ee45a32d
EB
1702 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1703 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1704 be non-NULL and is used to insert new statements either before or below
1705 the current one as specified by INSERT_AFTER. This function is not capable
1706 of handling bitfields. */
70f34814 1707
d242d063 1708tree
f7ed3195 1709build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
ee45a32d 1710 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
d242d063
MJ
1711 bool insert_after)
1712{
1713 tree prev_base = base;
1714 tree off;
4ca890e2 1715 tree mem_ref;
a90c8804 1716 poly_int64 base_offset;
aff86594
RG
1717 unsigned HOST_WIDE_INT misalign;
1718 unsigned int align;
d242d063 1719
0f784346
RB
1720 /* Preserve address-space information. */
1721 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1722 if (as != TYPE_ADDR_SPACE (exp_type))
1723 exp_type = build_qualified_type (exp_type,
1724 TYPE_QUALS (exp_type)
1725 | ENCODE_QUAL_ADDR_SPACE (as));
1726
f7ed3195 1727 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
4bd7b70b 1728 get_object_alignment_1 (base, &align, &misalign);
d242d063
MJ
1729 base = get_addr_base_and_unit_offset (base, &base_offset);
1730
1731 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1732 offset such as array[var_index]. */
1733 if (!base)
1734 {
538dd0b7 1735 gassign *stmt;
d242d063
MJ
1736 tree tmp, addr;
1737
1738 gcc_checking_assert (gsi);
b731b390 1739 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
d242d063 1740 addr = build_fold_addr_expr (unshare_expr (prev_base));
1d60cc55 1741 STRIP_USELESS_TYPE_CONVERSION (addr);
d242d063 1742 stmt = gimple_build_assign (tmp, addr);
e4b5cace 1743 gimple_set_location (stmt, loc);
d242d063
MJ
1744 if (insert_after)
1745 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1746 else
1747 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
d242d063 1748
f7ed3195 1749 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
d242d063
MJ
1750 base = tmp;
1751 }
1752 else if (TREE_CODE (base) == MEM_REF)
1753 {
1754 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
f7ed3195 1755 base_offset + byte_offset);
d35936ab 1756 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
d242d063
MJ
1757 base = unshare_expr (TREE_OPERAND (base, 0));
1758 }
1759 else
1760 {
3022186a 1761 off = build_int_cst (reference_alias_ptr_type (prev_base),
f7ed3195 1762 base_offset + byte_offset);
d242d063
MJ
1763 base = build_fold_addr_expr (unshare_expr (base));
1764 }
1765
f7ed3195
RS
1766 unsigned int align_bound = known_alignment (misalign + offset);
1767 if (align_bound != 0)
1768 align = MIN (align, align_bound);
474b3a7c 1769 if (align != TYPE_ALIGN (exp_type))
aff86594
RG
1770 exp_type = build_aligned_type (exp_type, align);
1771
4ca890e2 1772 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
ee45a32d 1773 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
4ca890e2
JJ
1774 if (TREE_THIS_VOLATILE (prev_base))
1775 TREE_THIS_VOLATILE (mem_ref) = 1;
1776 if (TREE_SIDE_EFFECTS (prev_base))
1777 TREE_SIDE_EFFECTS (mem_ref) = 1;
1778 return mem_ref;
d242d063
MJ
1779}
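/* A sketch of what the function above produces (assuming a simple
   declaration "struct S s;" and a request for a 32-bit integer at bit
   offset 32): the returned tree is the equivalent of

     MEM[(int *)&s + 4B]

   i.e. a MEM_REF whose second operand encodes both the 4-byte offset and
   the alias pointer type derived from the original base. */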
1780
1781/* Construct a memory reference to a part of an aggregate BASE at the given
36e57e16
MJ
1782 OFFSET and of the same type as MODEL. In case this is a reference to a
1783 bit-field, the function will replicate the last component_ref of model's
1784 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1785 build_ref_for_offset. */
d242d063
MJ
1786
1787static tree
e4b5cace 1788build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
d242d063
MJ
1789 struct access *model, gimple_stmt_iterator *gsi,
1790 bool insert_after)
1791{
36e57e16
MJ
1792 if (TREE_CODE (model->expr) == COMPONENT_REF
1793 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
d242d063 1794 {
36e57e16
MJ
1795 /* This access represents a bit-field. */
1796 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1797
1798 offset -= int_bit_position (fld);
1799 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
ee45a32d
EB
1800 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1801 gsi, insert_after);
1802 /* The flag will be set on the record type. */
1803 REF_REVERSE_STORAGE_ORDER (t) = 0;
36e57e16
MJ
1804 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1805 NULL_TREE);
d242d063 1806 }
36e57e16 1807 else
ee45a32d
EB
1808 return
1809 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1810 gsi, insert_after);
d242d063
MJ
1811}
1812
be384c10
MJ
1813/* Attempt to build a memory reference that we could put into a gimple
1814 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1815 create statements and returns NULL instead. This function also ignores
1816 alignment issues and so its results should never end up in non-debug
1817 statements. */
1818
1819static tree
1820build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1821 struct access *model)
1822{
a90c8804 1823 poly_int64 base_offset;
be384c10
MJ
1824 tree off;
1825
1826 if (TREE_CODE (model->expr) == COMPONENT_REF
1827 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1828 return NULL_TREE;
1829
1830 base = get_addr_base_and_unit_offset (base, &base_offset);
1831 if (!base)
1832 return NULL_TREE;
1833 if (TREE_CODE (base) == MEM_REF)
1834 {
1835 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1836 base_offset + offset / BITS_PER_UNIT);
1837 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1838 base = unshare_expr (TREE_OPERAND (base, 0));
1839 }
1840 else
1841 {
1842 off = build_int_cst (reference_alias_ptr_type (base),
1843 base_offset + offset / BITS_PER_UNIT);
1844 base = build_fold_addr_expr (unshare_expr (base));
1845 }
1846
1847 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1848}
1849
d242d063
MJ
1850/* Construct a memory reference consisting of component_refs and array_refs to
1851 a part of an aggregate *RES (which is of type TYPE). The requested part
1852 should have type EXP_TYPE and be at the given OFFSET. This function might not
1853 succeed; it returns true when it does, and only then *RES points to something
1854 meaningful. This function should be used only to build expressions that we
1855 might need to present to the user (e.g. in warnings). In all other situations,
1856 build_ref_for_model or build_ref_for_offset should be used instead. */
510335c8
AO
1857
1858static bool
d242d063
MJ
1859build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1860 tree exp_type)
45f94ec7 1861{
0674b9d0 1862 while (1)
510335c8 1863 {
0674b9d0 1864 tree fld;
22fc64b4 1865 tree tr_size, index, minidx;
0674b9d0 1866 HOST_WIDE_INT el_size;
510335c8 1867
0674b9d0 1868 if (offset == 0 && exp_type
71d4d3eb 1869 && types_compatible_p (exp_type, type))
0674b9d0 1870 return true;
510335c8 1871
0674b9d0 1872 switch (TREE_CODE (type))
510335c8 1873 {
0674b9d0
MJ
1874 case UNION_TYPE:
1875 case QUAL_UNION_TYPE:
1876 case RECORD_TYPE:
910ad8de 1877 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
0674b9d0
MJ
1878 {
1879 HOST_WIDE_INT pos, size;
ca8d9092 1880 tree tr_pos, expr, *expr_ptr;
510335c8 1881
0674b9d0
MJ
1882 if (TREE_CODE (fld) != FIELD_DECL)
1883 continue;
4c44c315 1884
ca8d9092 1885 tr_pos = bit_position (fld);
cc269bb6 1886 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
ca8d9092 1887 continue;
eb1ce453 1888 pos = tree_to_uhwi (tr_pos);
0674b9d0 1889 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
a1aa1701 1890 tr_size = DECL_SIZE (fld);
cc269bb6 1891 if (!tr_size || !tree_fits_uhwi_p (tr_size))
a1aa1701 1892 continue;
eb1ce453 1893 size = tree_to_uhwi (tr_size);
fff08961
MJ
1894 if (size == 0)
1895 {
1896 if (pos != offset)
1897 continue;
1898 }
1899 else if (pos > offset || (pos + size) <= offset)
0674b9d0 1900 continue;
2fb5f2af 1901
d242d063
MJ
1902 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1903 NULL_TREE);
1904 expr_ptr = &expr;
1905 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1906 offset - pos, exp_type))
0674b9d0 1907 {
d242d063 1908 *res = expr;
0674b9d0
MJ
1909 return true;
1910 }
1911 }
1912 return false;
ff1fe457 1913
0674b9d0
MJ
1914 case ARRAY_TYPE:
1915 tr_size = TYPE_SIZE (TREE_TYPE (type));
cc269bb6 1916 if (!tr_size || !tree_fits_uhwi_p (tr_size))
0674b9d0 1917 return false;
ae7e9ddd 1918 el_size = tree_to_uhwi (tr_size);
ff1fe457 1919
22fc64b4 1920 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
746e119f 1921 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
22fc64b4 1922 return false;
d242d063
MJ
1923 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1924 if (!integer_zerop (minidx))
d35936ab 1925 index = int_const_binop (PLUS_EXPR, index, minidx);
d242d063
MJ
1926 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1927 NULL_TREE, NULL_TREE);
0674b9d0
MJ
1928 offset = offset % el_size;
1929 type = TREE_TYPE (type);
1930 break;
510335c8 1931
0674b9d0
MJ
1932 default:
1933 if (offset != 0)
1934 return false;
510335c8 1935
0674b9d0
MJ
1936 if (exp_type)
1937 return false;
1938 else
1939 return true;
1940 }
d573123d 1941 }
45f94ec7
AO
1942}
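/* For instance, given a hypothetical

     struct S { int a; struct { int b; int c; } in; } s;

   a request for offset 64 (in bits) with an EXP_TYPE of int descends into
   the nested record and sets *RES to the COMPONENT_REF printed as s.in.c,
   whereas a request for offset 16 fails because no field starts at that
   position. */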
1943
1e9fb3de
MJ
1944/* Return true iff TYPE is stdarg va_list type. */
1945
1946static inline bool
1947is_va_list_type (tree type)
1948{
1949 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1950}
1951
949cfd0a
AK
1952/* Print message to dump file why a variable was rejected. */
1953
1954static void
1955reject (tree var, const char *msg)
1956{
1957 if (dump_file && (dump_flags & TDF_DETAILS))
1958 {
1959 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
ef6cb4c7 1960 print_generic_expr (dump_file, var);
949cfd0a
AK
1961 fprintf (dump_file, "\n");
1962 }
1963}
1964
d94b820b
RG
1965/* Return true if VAR is a candidate for SRA. */
1966
1967static bool
1968maybe_add_sra_candidate (tree var)
1969{
1970 tree type = TREE_TYPE (var);
1971 const char *msg;
4a8fb1a1 1972 tree_node **slot;
d94b820b
RG
1973
1974 if (!AGGREGATE_TYPE_P (type))
1975 {
1976 reject (var, "not aggregate");
1977 return false;
1978 }
88bed196
AL
1979 /* Allow constant-pool entries (that "need to live in memory")
1980 unless we are doing IPA SRA. */
1981 if (needs_to_live_in_memory (var)
1982 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
d94b820b
RG
1983 {
1984 reject (var, "needs to live in memory");
1985 return false;
1986 }
1987 if (TREE_THIS_VOLATILE (var))
1988 {
1989 reject (var, "is volatile");
1990 return false;
1991 }
1992 if (!COMPLETE_TYPE_P (type))
1993 {
1994 reject (var, "has incomplete type");
1995 return false;
1996 }
cc269bb6 1997 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
d94b820b
RG
1998 {
1999 reject (var, "type size not fixed");
2000 return false;
2001 }
ae7e9ddd 2002 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
d94b820b
RG
2003 {
2004 reject (var, "type size is zero");
2005 return false;
2006 }
2007 if (type_internals_preclude_sra_p (type, &msg))
2008 {
2009 reject (var, msg);
2010 return false;
2011 }
2012 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
2013 we also want to schedule it rather late. Thus we ignore it in
2014 the early pass. */
2015 (sra_mode == SRA_MODE_EARLY_INTRA
2016 && is_va_list_type (type)))
2017 {
2018 reject (var, "is va_list");
2019 return false;
2020 }
2021
2022 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
c203e8a7 2023 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
4a8fb1a1 2024 *slot = var;
d94b820b
RG
2025
2026 if (dump_file && (dump_flags & TDF_DETAILS))
2027 {
2028 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
ef6cb4c7 2029 print_generic_expr (dump_file, var);
d94b820b
RG
2030 fprintf (dump_file, "\n");
2031 }
2032
2033 return true;
2034}
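/* For illustration, in a function declaring

     struct S { int i; int j; } s;
     volatile struct S v;
     int k;

   only s becomes a candidate: v is rejected with "is volatile" and k with
   "not aggregate". */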
2035
0674b9d0
MJ
2036/* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2037 those declarations whose type is suitable for scalarization. */
aee91ff0 2038
0674b9d0
MJ
2039static bool
2040find_var_candidates (void)
2041{
d94b820b
RG
2042 tree var, parm;
2043 unsigned int i;
0674b9d0 2044 bool ret = false;
510335c8 2045
d94b820b
RG
2046 for (parm = DECL_ARGUMENTS (current_function_decl);
2047 parm;
2048 parm = DECL_CHAIN (parm))
2049 ret |= maybe_add_sra_candidate (parm);
2050
2051 FOR_EACH_LOCAL_DECL (cfun, i, var)
510335c8 2052 {
8813a647 2053 if (!VAR_P (var))
0674b9d0 2054 continue;
0674b9d0 2055
d94b820b 2056 ret |= maybe_add_sra_candidate (var);
510335c8
AO
2057 }
2058
0674b9d0
MJ
2059 return ret;
2060}
510335c8 2061
0674b9d0
MJ
2062/* Sort all accesses for the given variable, check for partial overlaps and
2063 return NULL if there are any. If there are none, pick a representative for
2064 each combination of offset and size and create a linked list out of them.
2065 Return the pointer to the first representative and make sure it is the first
2066 one in the vector of accesses. */
510335c8 2067
0674b9d0
MJ
2068static struct access *
2069sort_and_splice_var_accesses (tree var)
2070{
2071 int i, j, access_count;
2072 struct access *res, **prev_acc_ptr = &res;
9771b263 2073 vec<access_p> *access_vec;
0674b9d0
MJ
2074 bool first = true;
2075 HOST_WIDE_INT low = -1, high = 0;
510335c8 2076
0674b9d0
MJ
2077 access_vec = get_base_access_vector (var);
2078 if (!access_vec)
2079 return NULL;
9771b263 2080 access_count = access_vec->length ();
510335c8 2081
0674b9d0 2082 /* Sort by <OFFSET, SIZE>. */
9771b263 2083 access_vec->qsort (compare_access_positions);
510335c8 2084
0674b9d0
MJ
2085 i = 0;
2086 while (i < access_count)
510335c8 2087 {
9771b263 2088 struct access *access = (*access_vec)[i];
fef94f76 2089 bool grp_write = access->write;
0674b9d0 2090 bool grp_read = !access->write;
4fd73214
MJ
2091 bool grp_scalar_write = access->write
2092 && is_gimple_reg_type (access->type);
2093 bool grp_scalar_read = !access->write
2094 && is_gimple_reg_type (access->type);
77620011 2095 bool grp_assignment_read = access->grp_assignment_read;
fc37536b 2096 bool grp_assignment_write = access->grp_assignment_write;
4fd73214 2097 bool multiple_scalar_reads = false;
1ac93f10 2098 bool total_scalarization = access->grp_total_scalarization;
0674b9d0
MJ
2099 bool grp_partial_lhs = access->grp_partial_lhs;
2100 bool first_scalar = is_gimple_reg_type (access->type);
2101 bool unscalarizable_region = access->grp_unscalarizable_region;
973c150c
MJ
2102 bool bf_non_full_precision
2103 = (INTEGRAL_TYPE_P (access->type)
2104 && TYPE_PRECISION (access->type) != access->size
2105 && TREE_CODE (access->expr) == COMPONENT_REF
2106 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
510335c8 2107
0674b9d0 2108 if (first || access->offset >= high)
510335c8 2109 {
0674b9d0
MJ
2110 first = false;
2111 low = access->offset;
2112 high = access->offset + access->size;
510335c8 2113 }
0674b9d0
MJ
2114 else if (access->offset > low && access->offset + access->size > high)
2115 return NULL;
510335c8 2116 else
0674b9d0
MJ
2117 gcc_assert (access->offset >= low
2118 && access->offset + access->size <= high);
2119
2120 j = i + 1;
2121 while (j < access_count)
510335c8 2122 {
9771b263 2123 struct access *ac2 = (*access_vec)[j];
0674b9d0
MJ
2124 if (ac2->offset != access->offset || ac2->size != access->size)
2125 break;
fef94f76 2126 if (ac2->write)
4fd73214
MJ
2127 {
2128 grp_write = true;
2129 grp_scalar_write = (grp_scalar_write
2130 || is_gimple_reg_type (ac2->type));
2131 }
fef94f76
MJ
2132 else
2133 {
4fd73214
MJ
2134 grp_read = true;
2135 if (is_gimple_reg_type (ac2->type))
2136 {
2137 if (grp_scalar_read)
2138 multiple_scalar_reads = true;
2139 else
2140 grp_scalar_read = true;
2141 }
fef94f76 2142 }
77620011 2143 grp_assignment_read |= ac2->grp_assignment_read;
fc37536b 2144 grp_assignment_write |= ac2->grp_assignment_write;
0674b9d0
MJ
2145 grp_partial_lhs |= ac2->grp_partial_lhs;
2146 unscalarizable_region |= ac2->grp_unscalarizable_region;
1ac93f10 2147 total_scalarization |= ac2->grp_total_scalarization;
0674b9d0
MJ
2148 relink_to_new_repr (access, ac2);
2149
2150 /* If there are both aggregate-type and scalar-type accesses with
2151 this combination of size and offset, the comparison function
2152 should have put the scalars first. */
2153 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
973c150c
MJ
2154 /* It also prefers integral types to non-integral. However, when the
2155 precision of the selected type does not span the entire area and
2156 should also be used for a non-integer (i.e. float), we must not
2157 let that happen. Normally analyze_access_subtree expands the type
2158 to cover the entire area but for bit-fields it doesn't. */
2159 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2160 {
2161 if (dump_file && (dump_flags & TDF_DETAILS))
2162 {
2163 fprintf (dump_file, "Cannot scalarize the following access "
2164 "because insufficient precision integer type was "
2165 "selected.\n ");
2166 dump_access (dump_file, access, false);
2167 }
2168 unscalarizable_region = true;
2169 }
0674b9d0
MJ
2170 ac2->group_representative = access;
2171 j++;
510335c8
AO
2172 }
2173
0674b9d0
MJ
2174 i = j;
2175
2176 access->group_representative = access;
fef94f76 2177 access->grp_write = grp_write;
0674b9d0 2178 access->grp_read = grp_read;
4fd73214
MJ
2179 access->grp_scalar_read = grp_scalar_read;
2180 access->grp_scalar_write = grp_scalar_write;
77620011 2181 access->grp_assignment_read = grp_assignment_read;
fc37536b 2182 access->grp_assignment_write = grp_assignment_write;
c91a0948
MJ
2183 access->grp_hint = total_scalarization
2184 || (multiple_scalar_reads && !constant_decl_p (var));
1ac93f10 2185 access->grp_total_scalarization = total_scalarization;
0674b9d0
MJ
2186 access->grp_partial_lhs = grp_partial_lhs;
2187 access->grp_unscalarizable_region = unscalarizable_region;
0674b9d0
MJ
2188
2189 *prev_acc_ptr = access;
2190 prev_acc_ptr = &access->next_grp;
510335c8
AO
2191 }
2192
9771b263 2193 gcc_assert (res == (*access_vec)[0]);
0674b9d0 2194 return res;
510335c8
AO
2195}
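/* To illustrate the overlap checking above: accesses <0, 64> and <0, 32>
   are acceptable because the second lies entirely within the first, and
   <0, 32> and <32, 32> are acceptable because they are disjoint, but a pair
   like <0, 64> and <32, 64> overlaps only partially, so the function
   returns NULL and the whole variable is disqualified. */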
2196
0674b9d0
MJ
2197/* Create a variable for the given ACCESS which determines the type, name and a
2198 few other properties. Return the variable declaration, which the callers
2199 also store in ACCESS->replacement_decl. */
97e73bd2 2200
0674b9d0 2201static tree
13714310 2202create_access_replacement (struct access *access)
6de9cd9a 2203{
0674b9d0 2204 tree repl;
97e73bd2 2205
be384c10
MJ
2206 if (access->grp_to_be_debug_replaced)
2207 {
b731b390 2208 repl = create_tmp_var_raw (access->type);
be384c10
MJ
2209 DECL_CONTEXT (repl) = current_function_decl;
2210 }
2211 else
78f1b523
RB
2212 /* Drop any special alignment on the type if it's not on the main
2213 variant. This avoids issues with weirdo ABIs like AAPCS. */
2214 repl = create_tmp_var (build_qualified_type
2215 (TYPE_MAIN_VARIANT (access->type),
2216 TYPE_QUALS (access->type)), "SR");
3f5f6592
RG
2217 if (TREE_CODE (access->type) == COMPLEX_TYPE
2218 || TREE_CODE (access->type) == VECTOR_TYPE)
2219 {
2220 if (!access->grp_partial_lhs)
2221 DECL_GIMPLE_REG_P (repl) = 1;
2222 }
2223 else if (access->grp_partial_lhs
2224 && is_gimple_reg_type (access->type))
2225 TREE_ADDRESSABLE (repl) = 1;
0563fe8b 2226
0674b9d0
MJ
2227 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2228 DECL_ARTIFICIAL (repl) = 1;
ec24771f 2229 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
0674b9d0
MJ
2230
2231 if (DECL_NAME (access->base)
2232 && !DECL_IGNORED_P (access->base)
2233 && !DECL_ARTIFICIAL (access->base))
6de9cd9a 2234 {
0674b9d0 2235 char *pretty_name = make_fancy_name (access->expr);
64366d35 2236 tree debug_expr = unshare_expr_without_location (access->expr), d;
70b5e7dc 2237 bool fail = false;
0674b9d0
MJ
2238
2239 DECL_NAME (repl) = get_identifier (pretty_name);
5f36c869 2240 DECL_NAMELESS (repl) = 1;
0674b9d0
MJ
2241 obstack_free (&name_obstack, pretty_name);
2242
823e9473
JJ
2243 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2244 as DECL_DEBUG_EXPR isn't considered when looking for still
2245 used SSA_NAMEs and thus they could be freed. All debug info
2246 generation cares is whether something is constant or variable
2247 and that get_ref_base_and_extent works properly on the
70b5e7dc
RG
2248 expression. It cannot handle accesses at a non-constant offset
2249 though, so just give up in those cases. */
9430b7ba
JJ
2250 for (d = debug_expr;
2251 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
70b5e7dc 2252 d = TREE_OPERAND (d, 0))
823e9473
JJ
2253 switch (TREE_CODE (d))
2254 {
2255 case ARRAY_REF:
2256 case ARRAY_RANGE_REF:
2257 if (TREE_OPERAND (d, 1)
70b5e7dc
RG
2258 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2259 fail = true;
823e9473 2260 if (TREE_OPERAND (d, 3)
70b5e7dc
RG
2261 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2262 fail = true;
823e9473
JJ
2263 /* FALLTHRU */
2264 case COMPONENT_REF:
2265 if (TREE_OPERAND (d, 2)
70b5e7dc
RG
2266 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2267 fail = true;
823e9473 2268 break;
9430b7ba
JJ
2269 case MEM_REF:
2270 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2271 fail = true;
2272 else
2273 d = TREE_OPERAND (d, 0);
2274 break;
823e9473
JJ
2275 default:
2276 break;
2277 }
70b5e7dc
RG
2278 if (!fail)
2279 {
2280 SET_DECL_DEBUG_EXPR (repl, debug_expr);
839b422f 2281 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
70b5e7dc 2282 }
9271a43c
MJ
2283 if (access->grp_no_warning)
2284 TREE_NO_WARNING (repl) = 1;
2285 else
2286 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
97e73bd2 2287 }
ec24771f
MJ
2288 else
2289 TREE_NO_WARNING (repl) = 1;
0674b9d0
MJ
2290
2291 if (dump_file)
97e73bd2 2292 {
be384c10
MJ
2293 if (access->grp_to_be_debug_replaced)
2294 {
2295 fprintf (dump_file, "Created a debug-only replacement for ");
ef6cb4c7 2296 print_generic_expr (dump_file, access->base);
be384c10
MJ
2297 fprintf (dump_file, " offset: %u, size: %u\n",
2298 (unsigned) access->offset, (unsigned) access->size);
2299 }
2300 else
2301 {
2302 fprintf (dump_file, "Created a replacement for ");
ef6cb4c7 2303 print_generic_expr (dump_file, access->base);
be384c10
MJ
2304 fprintf (dump_file, " offset: %u, size: %u: ",
2305 (unsigned) access->offset, (unsigned) access->size);
ef6cb4c7 2306 print_generic_expr (dump_file, repl);
be384c10
MJ
2307 fprintf (dump_file, "\n");
2308 }
97e73bd2 2309 }
2a45675f 2310 sra_stats.replacements++;
0674b9d0
MJ
2311
2312 return repl;
97e73bd2 2313}
6de9cd9a 2314
56517dda 2315/* Return the scalar replacement of ACCESS, which must exist. */
6de9cd9a 2316
0674b9d0
MJ
2317static inline tree
2318get_access_replacement (struct access *access)
97e73bd2 2319{
b48b3fc4 2320 gcc_checking_assert (access->replacement_decl);
5feb49f0
MJ
2321 return access->replacement_decl;
2322}
2323
2324
0674b9d0
MJ
2325/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2326 linked list along the way. Stop when *ACCESS is NULL or the access pointed
591d4f4a
MJ
2327 to by it is not "within" the root. Return false iff some accesses partially
2328 overlap. */
e4521d11 2329
591d4f4a 2330static bool
0674b9d0
MJ
2331build_access_subtree (struct access **access)
2332{
2333 struct access *root = *access, *last_child = NULL;
2334 HOST_WIDE_INT limit = root->offset + root->size;
6de9cd9a 2335
0674b9d0
MJ
2336 *access = (*access)->next_grp;
2337 while (*access && (*access)->offset + (*access)->size <= limit)
97e73bd2 2338 {
0674b9d0
MJ
2339 if (!last_child)
2340 root->first_child = *access;
2341 else
2342 last_child->next_sibling = *access;
2343 last_child = *access;
2bba7541
MJ
2344 (*access)->parent = root;
2345 (*access)->grp_write |= root->grp_write;
6de9cd9a 2346
591d4f4a
MJ
2347 if (!build_access_subtree (access))
2348 return false;
97e73bd2 2349 }
591d4f4a
MJ
2350
2351 if (*access && (*access)->offset < limit)
2352 return false;
2353
2354 return true;
97e73bd2 2355}
6de9cd9a 2356
0674b9d0 2357/* Build a tree of access representatives, ACCESS is the pointer to the first
591d4f4a
MJ
2358 one, others are linked in a list by the next_grp field. Return false iff
2359 some accesses partially overlap. */
6de9cd9a 2360
591d4f4a 2361static bool
0674b9d0 2362build_access_trees (struct access *access)
6de9cd9a 2363{
0674b9d0 2364 while (access)
bfeebecf 2365 {
0674b9d0 2366 struct access *root = access;
6de9cd9a 2367
591d4f4a
MJ
2368 if (!build_access_subtree (&access))
2369 return false;
0674b9d0 2370 root->next_grp = access;
6de9cd9a 2371 }
591d4f4a 2372 return true;
97e73bd2 2373}
6de9cd9a 2374
22fc64b4
MJ
2375/* Return true if EXPR contains some ARRAY_REFs into a variable bounded
2376 array. */
2377
2378static bool
2379expr_with_var_bounded_array_refs_p (tree expr)
2380{
2381 while (handled_component_p (expr))
2382 {
2383 if (TREE_CODE (expr) == ARRAY_REF
9541ffee 2384 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
22fc64b4
MJ
2385 return true;
2386 expr = TREE_OPERAND (expr, 0);
2387 }
2388 return false;
2389}
2390
0674b9d0 2391/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
77620011
MJ
2392 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2393 sorts of access flags appropriately along the way, notably propagating
2394 grp_read, grp_assignment_read, grp_write and grp_assignment_write down
fc37536b
MJ
2395 from the PARENT access.
2396
2397 Creating a replacement for a scalar access is considered beneficial if its
2398 grp_hint is set (this means we are either attempting total scalarization or
2399 there is more than one direct read access) or according to the following
2400 table:
2401
4fd73214 2402 Access written to through a scalar type (once or more times)
fc37536b 2403 |
4fd73214 2404 | Written to in an assignment statement
fc37536b 2405 | |
4fd73214 2406 | | Access read as scalar _once_
fc37536b 2407 | | |
4fd73214 2408 | | | Read in an assignment statement
fc37536b
MJ
2409 | | | |
2410 | | | | Scalarize Comment
2411-----------------------------------------------------------------------------
2412 0 0 0 0 No access for the scalar
2413 0 0 0 1 No access for the scalar
2414 0 0 1 0 No Single read - won't help
2415 0 0 1 1 No The same case
2416 0 1 0 0 No access for the scalar
2417 0 1 0 1 No access for the scalar
2418 0 1 1 0 Yes s = *g; return s.i;
2419 0 1 1 1 Yes The same case as above
2420 1 0 0 0 No Won't help
2421 1 0 0 1 Yes s.i = 1; *g = s;
2422 1 0 1 0 Yes s.i = 5; g = s.i;
2423 1 0 1 1 Yes The same case as above
2424 1 1 0 0 No Won't help.
2425 1 1 0 1 Yes s.i = 1; *g = s;
2426 1 1 1 0 Yes s = *g; return s.i;
2427 1 1 1 1 Yes Any of the above yeses */
71956db3 2428
0674b9d0 2429static bool
d9c77712
MJ
2430analyze_access_subtree (struct access *root, struct access *parent,
2431 bool allow_replacements)
71956db3 2432{
0674b9d0
MJ
2433 struct access *child;
2434 HOST_WIDE_INT limit = root->offset + root->size;
2435 HOST_WIDE_INT covered_to = root->offset;
2436 bool scalar = is_gimple_reg_type (root->type);
2437 bool hole = false, sth_created = false;
71956db3 2438
d9c77712 2439 if (parent)
fc37536b 2440 {
d9c77712
MJ
2441 if (parent->grp_read)
2442 root->grp_read = 1;
2443 if (parent->grp_assignment_read)
2444 root->grp_assignment_read = 1;
2445 if (parent->grp_write)
2446 root->grp_write = 1;
2447 if (parent->grp_assignment_write)
2448 root->grp_assignment_write = 1;
1ac93f10
MJ
2449 if (parent->grp_total_scalarization)
2450 root->grp_total_scalarization = 1;
fc37536b 2451 }
0674b9d0
MJ
2452
2453 if (root->grp_unscalarizable_region)
2454 allow_replacements = false;
2455
22fc64b4
MJ
2456 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2457 allow_replacements = false;
2458
0674b9d0 2459 for (child = root->first_child; child; child = child->next_sibling)
97e73bd2 2460 {
1ac93f10 2461 hole |= covered_to < child->offset;
d9c77712
MJ
2462 sth_created |= analyze_access_subtree (child, root,
2463 allow_replacements && !scalar);
0674b9d0
MJ
2464
2465 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1ac93f10
MJ
2466 root->grp_total_scalarization &= child->grp_total_scalarization;
2467 if (child->grp_covered)
2468 covered_to += child->size;
2469 else
2470 hole = true;
97e73bd2 2471 }
6de9cd9a 2472
fef94f76
MJ
2473 if (allow_replacements && scalar && !root->first_child
2474 && (root->grp_hint
4fd73214
MJ
2475 || ((root->grp_scalar_read || root->grp_assignment_read)
2476 && (root->grp_scalar_write || root->grp_assignment_write))))
97e73bd2 2477 {
8085c586
RG
2478 /* Always create access replacements that cover the whole access.
2479 For integral types this means the precision has to match.
2480 Avoid assumptions based on the integral type kind, too. */
2481 if (INTEGRAL_TYPE_P (root->type)
2482 && (TREE_CODE (root->type) != INTEGER_TYPE
2483 || TYPE_PRECISION (root->type) != root->size)
2484 /* But leave bitfield accesses alone. */
e8257960
RG
2485 && (TREE_CODE (root->expr) != COMPONENT_REF
2486 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
da990dc0
MJ
2487 {
2488 tree rt = root->type;
e8257960
RG
2489 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2490 && (root->size % BITS_PER_UNIT) == 0);
8085c586 2491 root->type = build_nonstandard_integer_type (root->size,
da990dc0 2492 TYPE_UNSIGNED (rt));
ee45a32d
EB
2493 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2494 root->offset, root->reverse,
8085c586 2495 root->type, NULL, false);
da990dc0 2496
b48b3fc4
MJ
2497 if (dump_file && (dump_flags & TDF_DETAILS))
2498 {
2499 fprintf (dump_file, "Changing the type of a replacement for ");
ef6cb4c7 2500 print_generic_expr (dump_file, root->base);
b48b3fc4
MJ
2501 fprintf (dump_file, " offset: %u, size: %u ",
2502 (unsigned) root->offset, (unsigned) root->size);
2503 fprintf (dump_file, " to an integer.\n");
2504 }
97e73bd2 2505 }
6de9cd9a 2506
0674b9d0 2507 root->grp_to_be_replaced = 1;
b48b3fc4 2508 root->replacement_decl = create_access_replacement (root);
0674b9d0
MJ
2509 sth_created = true;
2510 hole = false;
97e73bd2 2511 }
1ac93f10
MJ
2512 else
2513 {
4267a4a6 2514 if (allow_replacements
be384c10 2515 && scalar && !root->first_child
207b5956
MJ
2516 && (root->grp_scalar_write || root->grp_assignment_write)
2517 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2518 DECL_UID (root->base)))
be384c10
MJ
2519 {
2520 gcc_checking_assert (!root->grp_scalar_read
2521 && !root->grp_assignment_read);
4267a4a6 2522 sth_created = true;
36f52e8f 2523 if (MAY_HAVE_DEBUG_BIND_STMTS)
be384c10 2524 {
4267a4a6 2525 root->grp_to_be_debug_replaced = 1;
b48b3fc4 2526 root->replacement_decl = create_access_replacement (root);
be384c10
MJ
2527 }
2528 }
2529
1ac93f10
MJ
2530 if (covered_to < limit)
2531 hole = true;
1430fb1f 2532 if (scalar || !allow_replacements)
1ac93f10
MJ
2533 root->grp_total_scalarization = 0;
2534 }
402a3dec 2535
4267a4a6
MJ
2536 if (!hole || root->grp_total_scalarization)
2537 root->grp_covered = 1;
8eef6097 2538 else if (root->grp_write || comes_initialized_p (root->base))
0674b9d0 2539 root->grp_unscalarized_data = 1; /* not covered and written to */
4267a4a6 2540 return sth_created;
97e73bd2 2541}
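/* A concrete case of the integer type adjustment above (a hypothetical
   example): a replacement for a _Bool member, whose BOOLEAN_TYPE is not an
   INTEGER_TYPE, is re-typed via build_nonstandard_integer_type to an
   unsigned 8-bit integer, so that loads and stores of the replacement
   always cover the whole allocated area of the access. */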
6de9cd9a 2542
0674b9d0
MJ
2543/* Analyze all access trees linked by next_grp by means of
2544 analyze_access_subtree. */
fa588429 2545static bool
0674b9d0 2546analyze_access_trees (struct access *access)
fa588429 2547{
0674b9d0 2548 bool ret = false;
fa588429 2549
0674b9d0 2550 while (access)
fa588429 2551 {
d9c77712 2552 if (analyze_access_subtree (access, NULL, true))
0674b9d0
MJ
2553 ret = true;
2554 access = access->next_grp;
fa588429
RH
2555 }
2556
2557 return ret;
2558}
2559
0674b9d0
MJ
2560/* Return true iff a potential new child of LACC at offset OFFSET and with size
2561 SIZE would conflict with an already existing one. If exactly such a child
2562 already exists in LACC, store a pointer to it in EXACT_MATCH. */
6de9cd9a 2563
0674b9d0
MJ
2564static bool
2565child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2566 HOST_WIDE_INT size, struct access **exact_match)
6de9cd9a 2567{
0674b9d0
MJ
2568 struct access *child;
2569
2570 for (child = lacc->first_child; child; child = child->next_sibling)
2571 {
2572 if (child->offset == norm_offset && child->size == size)
2573 {
2574 *exact_match = child;
2575 return true;
2576 }
6de9cd9a 2577
0674b9d0
MJ
2578 if (child->offset < norm_offset + size
2579 && child->offset + child->size > norm_offset)
2580 return true;
2581 }
2582
2583 return false;
6de9cd9a
DN
2584}
2585
0674b9d0
MJ
2586/* Create a new child access of PARENT, with all properties just like MODEL
2587 except for its offset and with its grp_write false and grp_read true.
2bba7541
MJ
2588 Return the new access or NULL if it cannot be created. Note that this
2589 access is created long after all splicing and sorting, so it is not located
2590 in any access vector and is automatically a representative of its group. Set
2591 the grp_write flag of the new access if SET_GRP_WRITE is true. */
0674b9d0
MJ
2592
2593static struct access *
2594create_artificial_child_access (struct access *parent, struct access *model,
2bba7541
MJ
2595 HOST_WIDE_INT new_offset,
2596 bool set_grp_write)
6de9cd9a 2597{
0674b9d0 2598 struct access **child;
d242d063 2599 tree expr = parent->base;
6de9cd9a 2600
0674b9d0 2601 gcc_assert (!model->grp_unscalarizable_region);
4a50e99c 2602
fb0b2914 2603 struct access *access = access_pool.allocate ();
0674b9d0 2604 memset (access, 0, sizeof (struct access));
9271a43c
MJ
2605 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2606 model->type))
2607 {
2608 access->grp_no_warning = true;
2609 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2610 new_offset, model, NULL, false);
2611 }
2612
0674b9d0 2613 access->base = parent->base;
4a50e99c 2614 access->expr = expr;
0674b9d0
MJ
2615 access->offset = new_offset;
2616 access->size = model->size;
0674b9d0 2617 access->type = model->type;
2bba7541 2618 access->grp_write = set_grp_write;
0674b9d0 2619 access->grp_read = false;
ee45a32d 2620 access->reverse = model->reverse;
510335c8 2621
0674b9d0
MJ
2622 child = &parent->first_child;
2623 while (*child && (*child)->offset < new_offset)
2624 child = &(*child)->next_sibling;
510335c8 2625
0674b9d0
MJ
2626 access->next_sibling = *child;
2627 *child = access;
510335c8 2628
0674b9d0
MJ
2629 return access;
2630}
510335c8 2631
0674b9d0 2632
345683a9
MJ
2633/* Beginning with ACCESS, traverse its whole access subtree and mark all
2634 sub-trees as written to. If any of them has not been marked so previously
2635 and has assignment links leading from it, re-enqueue it. */
2636
2637static void
2638subtree_mark_written_and_enqueue (struct access *access)
2639{
2640 if (access->grp_write)
2641 return;
2642 access->grp_write = true;
2643 add_access_to_work_queue (access);
2644
2645 struct access *child;
2646 for (child = access->first_child; child; child = child->next_sibling)
2647 subtree_mark_written_and_enqueue (child);
2648}
2649
2650/* Propagate subaccesses and grp_write flags of RACC across an assignment link
2651 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2652 propagated transitively. Return true if anything changed. Additionally, if
2653 RACC is a scalar access but LACC is not, change the type of the latter, if
2654 possible. */
510335c8
AO
2655
2656static bool
8a1326b3 2657propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
510335c8 2658{
0674b9d0
MJ
2659 struct access *rchild;
2660 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
0674b9d0 2661 bool ret = false;
510335c8 2662
2bba7541
MJ
2663 /* If the LHS is still not marked as being written to, we only need to do so
2664 if the RHS at this level actually was. */
8eef6097 2665 if (!lacc->grp_write)
2bba7541 2666 {
8eef6097
MJ
2667 gcc_checking_assert (!comes_initialized_p (racc->base));
2668 if (racc->grp_write)
2669 {
345683a9 2670 subtree_mark_written_and_enqueue (lacc);
8eef6097
MJ
2671 ret = true;
2672 }
2bba7541
MJ
2673 }
2674
0674b9d0
MJ
2675 if (is_gimple_reg_type (lacc->type)
2676 || lacc->grp_unscalarizable_region
2677 || racc->grp_unscalarizable_region)
2bba7541 2678 {
345683a9
MJ
2679 if (!lacc->grp_write)
2680 {
2681 ret = true;
2682 subtree_mark_written_and_enqueue (lacc);
2683 }
2bba7541
MJ
2684 return ret;
2685 }
510335c8 2686
d3705186 2687 if (is_gimple_reg_type (racc->type))
510335c8 2688 {
345683a9
MJ
2689 if (!lacc->grp_write)
2690 {
2691 ret = true;
2692 subtree_mark_written_and_enqueue (lacc);
2693 }
d3705186 2694 if (!lacc->first_child && !racc->first_child)
4a50e99c 2695 {
d3705186
MJ
2696 tree t = lacc->base;
2697
2698 lacc->type = racc->type;
2699 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2700 lacc->offset, racc->type))
2701 lacc->expr = t;
2702 else
2703 {
2704 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2705 lacc->base, lacc->offset,
2706 racc, NULL, false);
2707 lacc->grp_no_warning = true;
2708 }
4a50e99c 2709 }
2bba7541 2710 return ret;
0674b9d0 2711 }
510335c8 2712
0674b9d0
MJ
2713 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2714 {
2715 struct access *new_acc = NULL;
2716 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
510335c8 2717
0674b9d0
MJ
2718 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2719 &new_acc))
510335c8 2720 {
fef94f76
MJ
2721 if (new_acc)
2722 {
345683a9 2723 if (!new_acc->grp_write && rchild->grp_write)
2bba7541 2724 {
345683a9
MJ
2725 gcc_assert (!lacc->grp_write);
2726 subtree_mark_written_and_enqueue (new_acc);
2bba7541
MJ
2727 ret = true;
2728 }
2729
fef94f76
MJ
2730 rchild->grp_hint = 1;
2731 new_acc->grp_hint |= new_acc->grp_read;
2732 if (rchild->first_child)
8a1326b3 2733 ret |= propagate_subaccesses_across_link (new_acc, rchild);
fef94f76 2734 }
2bba7541 2735 else
345683a9 2736 {
3b7511f9 2737 if (!lacc->grp_write)
345683a9
MJ
2738 {
2739 ret = true;
2740 subtree_mark_written_and_enqueue (lacc);
2741 }
2742 }
2743 continue;
2744 }
2745
2746 if (rchild->grp_unscalarizable_region)
2747 {
2748 if (rchild->grp_write && !lacc->grp_write)
2749 {
2750 ret = true;
2751 subtree_mark_written_and_enqueue (lacc);
2752 }
0674b9d0 2753 continue;
510335c8 2754 }
0674b9d0 2755
fef94f76 2756 rchild->grp_hint = 1;
2bba7541
MJ
2757 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2758 lacc->grp_write
2759 || rchild->grp_write);
345683a9
MJ
2760 gcc_checking_assert (new_acc);
2761 if (racc->first_child)
2762 propagate_subaccesses_across_link (new_acc, rchild);
2763
2764 add_access_to_work_queue (lacc);
2765 ret = true;
510335c8
AO
2766 }
2767
2768 return ret;
2769}
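/* To make the propagation above concrete (a hypothetical example): in

     struct S { struct { int a; int b; } in; } src, dst;
     int r = src.in.b;
     dst = src;

   the read of src.in.b creates a scalar subaccess of src; crossing the
   assign link of "dst = src", this function creates a matching artificial
   child access of dst at the same offset, so that both sides of the
   aggregate copy can be scalarized consistently. */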
2770
0674b9d0 2771/* Propagate all subaccesses across assignment links. */
510335c8
AO
2772
2773static void
0674b9d0 2774propagate_all_subaccesses (void)
510335c8 2775{
0674b9d0 2776 while (work_queue_head)
510335c8 2777 {
0674b9d0
MJ
2778 struct access *racc = pop_access_from_work_queue ();
2779 struct assign_link *link;
510335c8 2780
e9982c6a
MJ
2781 if (racc->group_representative)
2782 racc = racc->group_representative;
0674b9d0 2783 gcc_assert (racc->first_link);
510335c8 2784
0674b9d0 2785 for (link = racc->first_link; link; link = link->next)
510335c8 2786 {
0674b9d0 2787 struct access *lacc = link->lacc;
510335c8 2788
0674b9d0
MJ
2789 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2790 continue;
2791 lacc = lacc->group_representative;
c8638450
MJ
2792
2793 bool requeue_parents = false;
2794 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2795 {
2796 if (!lacc->grp_write)
2797 {
2798 subtree_mark_written_and_enqueue (lacc);
2799 requeue_parents = true;
2800 }
2801 }
2802 else if (propagate_subaccesses_across_link (lacc, racc))
2803 requeue_parents = true;
2804
2805 if (requeue_parents)
2bba7541
MJ
2806 do
2807 {
35a952ba 2808 add_access_to_work_queue (lacc);
2bba7541
MJ
2809 lacc = lacc->parent;
2810 }
2811 while (lacc);
0674b9d0
MJ
2812 }
2813 }
2814}
510335c8 2815
0674b9d0
MJ
2816/* Go through all accesses collected throughout the (intraprocedural) analysis
2817 stage, exclude overlapping ones, identify representatives and build trees
2818 out of them, making decisions about scalarization on the way. Return true
2819 iff there are any to-be-scalarized variables after this stage. */
088371ac 2820
0674b9d0
MJ
2821static bool
2822analyze_all_variable_accesses (void)
2823{
2a45675f 2824 int res = 0;
aecd4d81
RG
2825 bitmap tmp = BITMAP_ALLOC (NULL);
2826 bitmap_iterator bi;
5a6bc9c7 2827 unsigned i;
7bd6f24b
JG
2828 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2829
2830 enum compiler_param param = optimize_speed_p
2831 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2832 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2833
2834 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2835 fall back to a target default. */
2836 unsigned HOST_WIDE_INT max_scalarization_size
2837 = global_options_set.x_param_values[param]
2838 ? PARAM_VALUE (param)
2839 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2840
2841 max_scalarization_size *= BITS_PER_UNIT;
7744b697
MJ
2842
2843 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2844 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2845 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2846 {
d94b820b 2847 tree var = candidate (i);
7744b697 2848
a753df11
MJ
2849 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2850 constant_decl_p (var)))
7744b697 2851 {
7d362f6c 2852 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
5a6bc9c7 2853 <= max_scalarization_size)
e923ef41 2854 {
f2511224 2855 create_total_scalarization_access (var);
d0f4e7fc 2856 completely_scalarize (var, TREE_TYPE (var), 0, var);
a753df11
MJ
2857 statistics_counter_event (cfun,
2858 "Totally-scalarized aggregates", 1);
e923ef41
MJ
2859 if (dump_file && (dump_flags & TDF_DETAILS))
2860 {
2861 fprintf (dump_file, "Will attempt to totally scalarize ");
ef6cb4c7 2862 print_generic_expr (dump_file, var);
e923ef41
MJ
2863 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2864 }
2865 }
2866 else if (dump_file && (dump_flags & TDF_DETAILS))
7744b697 2867 {
e923ef41 2868 fprintf (dump_file, "Too big to totally scalarize: ");
ef6cb4c7 2869 print_generic_expr (dump_file, var);
e923ef41 2870 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
7744b697
MJ
2871 }
2872 }
2873 }
510335c8 2874
aecd4d81
RG
2875 bitmap_copy (tmp, candidate_bitmap);
2876 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2877 {
d94b820b 2878 tree var = candidate (i);
aecd4d81
RG
2879 struct access *access;
2880
2881 access = sort_and_splice_var_accesses (var);
591d4f4a 2882 if (!access || !build_access_trees (access))
aecd4d81
RG
2883 disqualify_candidate (var,
2884 "No or inhibitingly overlapping accesses.");
2885 }
510335c8 2886
0674b9d0 2887 propagate_all_subaccesses ();
510335c8 2888
aecd4d81
RG
2889 bitmap_copy (tmp, candidate_bitmap);
2890 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2891 {
d94b820b 2892 tree var = candidate (i);
aecd4d81 2893 struct access *access = get_first_repr_for_decl (var);
510335c8 2894
aecd4d81
RG
2895 if (analyze_access_trees (access))
2896 {
2897 res++;
2898 if (dump_file && (dump_flags & TDF_DETAILS))
2899 {
2900 fprintf (dump_file, "\nAccess trees for ");
ef6cb4c7 2901 print_generic_expr (dump_file, var);
aecd4d81
RG
2902 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2903 dump_access_tree (dump_file, access);
2904 fprintf (dump_file, "\n");
2905 }
2906 }
2907 else
2908 disqualify_candidate (var, "No scalar replacements to be created.");
2909 }
2910
2911 BITMAP_FREE (tmp);
510335c8 2912
2a45675f
MJ
2913 if (res)
2914 {
2915 statistics_counter_event (cfun, "Scalarized aggregates", res);
2916 return true;
2917 }
2918 else
2919 return false;
510335c8
AO
2920}
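/* The size cap computed above is user-adjustable; assuming a GCC build with
   the standard parameter set, something like

     gcc -O2 --param sra-max-scalarization-size-Ospeed=32 test.c

   caps total scalarization at aggregates of 32 bytes when optimizing for
   speed, while the default is derived from the target's move ratio times
   the word size. */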
2921
0674b9d0 2922/* Generate statements copying scalar replacements of accesses within a subtree
ea395a11
MJ
2923 into or out of AGG. ACCESS, all its children, siblings and their children
2924 are to be processed. AGG is an aggregate type expression (can be a
2925 declaration but does not have to be, it can for example also be a mem_ref or
2926 a series of handled components). TOP_OFFSET is the offset of the processed
2927 subtree which has to be subtracted from offsets of individual accesses to
2928 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
d242d063
MJ
2929 replacements in the interval <start_offset, start_offset + chunk_size>,
2930 otherwise copy all. GSI is a statement iterator used to place the new
2931 statements. WRITE should be true when the statements should write from AGG
2932 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2933 statements will be added after the current statement in GSI; otherwise they
2934 will be added before the statement. */
6de9cd9a
DN
2935
2936static void
0674b9d0
MJ
2937generate_subtree_copies (struct access *access, tree agg,
2938 HOST_WIDE_INT top_offset,
2939 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2940 gimple_stmt_iterator *gsi, bool write,
e4b5cace 2941 bool insert_after, location_t loc)
6de9cd9a 2942{
4e0eea75
JJ
2943 /* Never write anything into constant pool decls. See PR70602. */
2944 if (!write && constant_decl_p (agg))
2945 return;
0674b9d0 2946 do
6de9cd9a 2947 {
0674b9d0
MJ
2948 if (chunk_size && access->offset >= start_offset + chunk_size)
2949 return;
510335c8 2950
0674b9d0
MJ
2951 if (access->grp_to_be_replaced
2952 && (chunk_size == 0
2953 || access->offset + access->size > start_offset))
510335c8 2954 {
d242d063 2955 tree expr, repl = get_access_replacement (access);
538dd0b7 2956 gassign *stmt;
510335c8 2957
e4b5cace 2958 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
d242d063 2959 access, gsi, insert_after);
510335c8 2960
0674b9d0 2961 if (write)
510335c8 2962 {
0674b9d0
MJ
2963 if (access->grp_partial_lhs)
2964 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2965 !insert_after,
2966 insert_after ? GSI_NEW_STMT
2967 : GSI_SAME_STMT);
2968 stmt = gimple_build_assign (repl, expr);
2969 }
2970 else
2971 {
2972 TREE_NO_WARNING (repl) = 1;
2973 if (access->grp_partial_lhs)
2974 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2975 !insert_after,
2976 insert_after ? GSI_NEW_STMT
2977 : GSI_SAME_STMT);
2978 stmt = gimple_build_assign (expr, repl);
510335c8 2979 }
e4b5cace 2980 gimple_set_location (stmt, loc);
510335c8 2981
0674b9d0
MJ
2982 if (insert_after)
2983 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
b5dcec1e 2984 else
0674b9d0
MJ
2985 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2986 update_stmt (stmt);
71d4d3eb 2987 sra_stats.subtree_copies++;
510335c8 2988 }
be384c10
MJ
2989 else if (write
2990 && access->grp_to_be_debug_replaced
2991 && (chunk_size == 0
2992 || access->offset + access->size > start_offset))
2993 {
538dd0b7 2994 gdebug *ds;
be384c10
MJ
2995 tree drhs = build_debug_ref_for_model (loc, agg,
2996 access->offset - top_offset,
2997 access);
2998 ds = gimple_build_debug_bind (get_access_replacement (access),
2999 drhs, gsi_stmt (*gsi));
3000 if (insert_after)
3001 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3002 else
3003 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3004 }
510335c8 3005
0674b9d0
MJ
3006 if (access->first_child)
3007 generate_subtree_copies (access->first_child, agg, top_offset,
3008 start_offset, chunk_size, gsi,
e4b5cace 3009 write, insert_after, loc);
510335c8 3010
0674b9d0 3011 access = access->next_sibling;
510335c8 3012 }
0674b9d0
MJ
3013 while (access);
3014}
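/* For illustration (with hypothetical replacement names): if ACCESS has
   scalar children for fields i and f of "struct S { int i; float f; }" and
   AGG is a variable s, a call with WRITE true loads the aggregate into the
   replacements,

     s$i_1 = s.i;
     s$f_2 = s.f;

   while WRITE false emits the same copies in the opposite direction. */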
3015
3016/* Assign zero to all scalar replacements in an access subtree. ACCESS is the
6af801f5 3017 root of the subtree to be processed. GSI is the statement iterator used
0674b9d0
MJ
3018 for inserting statements which are added after the current statement if
3019 INSERT_AFTER is true or before it otherwise. */
3020
3021static void
3022init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
e4b5cace 3023 bool insert_after, location_t loc)
0674b9d0
MJ
3024
3025{
3026 struct access *child;
3027
3028 if (access->grp_to_be_replaced)
510335c8 3029 {
538dd0b7 3030 gassign *stmt;
510335c8 3031
0674b9d0 3032 stmt = gimple_build_assign (get_access_replacement (access),
e8160c9a 3033 build_zero_cst (access->type));
0674b9d0
MJ
3034 if (insert_after)
3035 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3036 else
3037 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3038 update_stmt (stmt);
e4b5cace 3039 gimple_set_location (stmt, loc);
0674b9d0 3040 }
be384c10
MJ
3041 else if (access->grp_to_be_debug_replaced)
3042 {
538dd0b7
DM
3043 gdebug *ds
3044 = gimple_build_debug_bind (get_access_replacement (access),
3045 build_zero_cst (access->type),
3046 gsi_stmt (*gsi));
be384c10
MJ
3047 if (insert_after)
3048 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3049 else
3050 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3051 }
510335c8 3052
0674b9d0 3053 for (child = access->first_child; child; child = child->next_sibling)
e4b5cace 3054 init_subtree_with_zero (child, gsi, insert_after, loc);
0674b9d0 3055}
510335c8 3056
6af801f5 3057/* Clobber all scalar replacements in an access subtree. ACCESS is the
9ce85efc
MG
3058 root of the subtree to be processed. GSI is the statement iterator used
3059 for inserting statements which are added after the current statement if
3060 INSERT_AFTER is true or before it otherwise. */
3061
3062static void
3063clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3064 bool insert_after, location_t loc)
3065
3066{
3067 struct access *child;
3068
3069 if (access->grp_to_be_replaced)
3070 {
3071 tree rep = get_access_replacement (access);
3072 tree clobber = build_constructor (access->type, NULL);
3073 TREE_THIS_VOLATILE (clobber) = 1;
355fe088 3074 gimple *stmt = gimple_build_assign (rep, clobber);
9ce85efc
MG
3075
3076 if (insert_after)
3077 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3078 else
3079 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3080 update_stmt (stmt);
3081 gimple_set_location (stmt, loc);
3082 }
3083
3084 for (child = access->first_child; child; child = child->next_sibling)
3085 clobber_subtree (child, gsi, insert_after, loc);
3086}
3087
0674b9d0
MJ
3088/* Search for an access representative for the given expression EXPR and
3089 return it or NULL if it cannot be found. */
510335c8 3090
0674b9d0
MJ
3091static struct access *
3092get_access_for_expr (tree expr)
3093{
588db50c
RS
3094 poly_int64 poffset, psize, pmax_size;
3095 HOST_WIDE_INT offset, max_size;
0674b9d0 3096 tree base;
ee45a32d 3097 bool reverse;
510335c8 3098
0674b9d0
MJ
3099 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3100 a different size than the size of its argument and we need the latter
3101 one. */
3102 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3103 expr = TREE_OPERAND (expr, 0);
510335c8 3104
588db50c
RS
3105 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3106 &reverse);
3107 if (!known_size_p (pmax_size)
3108 || !pmax_size.is_constant (&max_size)
3109 || !poffset.is_constant (&offset)
3110 || !DECL_P (base))
0674b9d0 3111 return NULL;
510335c8 3112
0674b9d0
MJ
3113 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3114 return NULL;
3115
3116 return get_var_base_offset_size_access (base, offset, max_size);
3117}
3118
6cbd3b6a
MJ
3119/* Replace the expression EXPR with a scalar replacement if there is one and
3120 generate other statements to do type conversion or subtree copying if
3121 necessary. GSI is used to place newly created statements; WRITE is true if
3122 the expression is being written to (it is on a LHS of a statement or output
3123 in an assembly statement). */
0674b9d0
MJ
3124
3125static bool
6cbd3b6a 3126sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
0674b9d0 3127{
e4b5cace 3128 location_t loc;
0674b9d0 3129 struct access *access;
28151221 3130 tree type, bfr, orig_expr;
510335c8 3131
0674b9d0
MJ
3132 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3133 {
3134 bfr = *expr;
3135 expr = &TREE_OPERAND (*expr, 0);
510335c8 3136 }
97e73bd2 3137 else
0674b9d0
MJ
3138 bfr = NULL_TREE;
3139
3140 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3141 expr = &TREE_OPERAND (*expr, 0);
3142 access = get_access_for_expr (*expr);
3143 if (!access)
3144 return false;
3145 type = TREE_TYPE (*expr);
28151221 3146 orig_expr = *expr;
0674b9d0 3147
e4b5cace 3148 loc = gimple_location (gsi_stmt (*gsi));
104cb50b
MJ
3149 gimple_stmt_iterator alt_gsi = gsi_none ();
3150 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3151 {
3152 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3153 gsi = &alt_gsi;
3154 }
3155
0674b9d0 3156 if (access->grp_to_be_replaced)
6de9cd9a 3157 {
0674b9d0
MJ
3158 tree repl = get_access_replacement (access);
3159 /* If we replace a non-register typed access simply use the original
3160 access expression to extract the scalar component afterwards.
3161 This happens if scalarizing a function return value or parameter
3162 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
9fda11a2
MJ
3163 gcc.c-torture/compile/20011217-1.c.
3164
3165 We also want to use this when accessing a complex or vector which can
3166 be accessed as a different type too, potentially creating a need for
caee6ca1
MJ
3167 type conversion (see PR42196) and when scalarized unions are involved
3168 in assembler statements (see PR42398). */
3169 if (!useless_type_conversion_p (type, access->type))
0674b9d0 3170 {
d242d063 3171 tree ref;
09f0dc45 3172
9d2681a3 3173 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
09f0dc45 3174
0674b9d0
MJ
3175 if (write)
3176 {
538dd0b7 3177 gassign *stmt;
09f0dc45 3178
0674b9d0
MJ
3179 if (access->grp_partial_lhs)
3180 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3181 false, GSI_NEW_STMT);
3182 stmt = gimple_build_assign (repl, ref);
e4b5cace 3183 gimple_set_location (stmt, loc);
0674b9d0
MJ
3184 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3185 }
3186 else
3187 {
538dd0b7 3188 gassign *stmt;
09f0dc45 3189
0674b9d0
MJ
3190 if (access->grp_partial_lhs)
3191 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3192 true, GSI_SAME_STMT);
09f0dc45 3193 stmt = gimple_build_assign (ref, repl);
e4b5cace 3194 gimple_set_location (stmt, loc);
0674b9d0
MJ
3195 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3196 }
3197 }
143569a8 3198 else
caee6ca1 3199 *expr = repl;
2a45675f 3200 sra_stats.exprs++;
0674b9d0 3201 }
be384c10
MJ
3202 else if (write && access->grp_to_be_debug_replaced)
3203 {
538dd0b7
DM
3204 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3205 NULL_TREE,
3206 gsi_stmt (*gsi));
be384c10
MJ
3207 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3208 }
0674b9d0
MJ
3209
3210 if (access->first_child)
3211 {
3212 HOST_WIDE_INT start_offset, chunk_size;
3213 if (bfr
cc269bb6
RS
3214 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3215 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
0674b9d0 3216 {
ae7e9ddd 3217 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
f57017cd 3218 start_offset = access->offset
ae7e9ddd 3219 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
d116ffa6 3220 }
0674b9d0
MJ
3221 else
3222 start_offset = chunk_size = 0;
3223
28151221 3224 generate_subtree_copies (access->first_child, orig_expr, access->offset,
e4b5cace
MJ
3225 start_offset, chunk_size, gsi, write, write,
3226 loc);
6de9cd9a 3227 }
0674b9d0 3228 return true;
6de9cd9a
DN
3229}
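/* Illustrative example (not from the original sources): for a candidate
   'struct S { int i; float f; } s;', a read of 's.f' whose type matches the
   replacement is rewritten in place to the replacement variable, say SR.f.
   If the types differ, e.g. 's.f' is accessed as an int through a
   VIEW_CONVERT_EXPR, the original memory reference is kept in the statement
   and an extra statement copying between it and the replacement is emitted
   before or after it, as built by build_ref_for_model above.  */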
3230
fac52fdd
MJ
 3231/* Where scalar replacements of the RHS have been written to when a replacement
 3232   of the LHS of an assignment cannot be directly loaded from a replacement of
 3233   the RHS.  */
3234enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3235 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3236 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
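/* These three values form a small state machine used while rewriting one
   aggregate assignment: processing starts in SRA_UDH_NONE and switches to
   SRA_UDH_RIGHT or SRA_UDH_LEFT the first time unscalarized data force the
   scalar replacements to be flushed back into an aggregate; the chosen side
   is then used as the source when loading sub-replacements of the LHS that
   have no scalar counterpart on the RHS.  */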
3237
28151221
MJ
3238struct subreplacement_assignment_data
3239{
3240 /* Offset of the access representing the lhs of the assignment. */
3241 HOST_WIDE_INT left_offset;
3242
3243 /* LHS and RHS of the original assignment. */
3244 tree assignment_lhs, assignment_rhs;
3245
3246 /* Access representing the rhs of the whole assignment. */
3247 struct access *top_racc;
3248
3249 /* Stmt iterator used for statement insertions after the original assignment.
3250 It points to the main GSI used to traverse a BB during function body
3251 modification. */
3252 gimple_stmt_iterator *new_gsi;
3253
3254 /* Stmt iterator used for statement insertions before the original
3255 assignment. Keeps on pointing to the original statement. */
3256 gimple_stmt_iterator old_gsi;
3257
3258 /* Location of the assignment. */
3259 location_t loc;
3260
3261 /* Keeps the information whether we have needed to refresh replacements of
3262 the LHS and from which side of the assignments this takes place. */
3263 enum unscalarized_data_handling refreshed;
3264};
3265
0674b9d0 3266/* Store all replacements in the access tree rooted in SAD->top_racc either to
ea395a11
MJ
 3267   their base aggregate if there are unscalarized data or directly to the LHS
 3268   of the assignment statement otherwise.  */
6de9cd9a 3269
28151221
MJ
3270static void
3271handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
6de9cd9a 3272{
28151221
MJ
3273 tree src;
3274 if (sad->top_racc->grp_unscalarized_data)
fac52fdd 3275 {
28151221
MJ
3276 src = sad->assignment_rhs;
3277 sad->refreshed = SRA_UDH_RIGHT;
fac52fdd 3278 }
0674b9d0 3279 else
fac52fdd 3280 {
28151221
MJ
3281 src = sad->assignment_lhs;
3282 sad->refreshed = SRA_UDH_LEFT;
fac52fdd 3283 }
28151221
MJ
3284 generate_subtree_copies (sad->top_racc->first_child, src,
3285 sad->top_racc->offset, 0, 0,
3286 &sad->old_gsi, false, false, sad->loc);
0674b9d0 3287}
6de9cd9a 3288
ea395a11 3289/* Try to generate statements to load all sub-replacements in an access subtree
28151221
MJ
3290 formed by children of LACC from scalar replacements in the SAD->top_racc
3291 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3292 and load the accesses from it. */
726a989a 3293
0674b9d0 3294static void
28151221
MJ
3295load_assign_lhs_subreplacements (struct access *lacc,
3296 struct subreplacement_assignment_data *sad)
0674b9d0 3297{
ea395a11 3298 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
97e73bd2 3299 {
28151221
MJ
3300 HOST_WIDE_INT offset;
3301 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
be384c10 3302
0674b9d0 3303 if (lacc->grp_to_be_replaced)
6de9cd9a 3304 {
0674b9d0 3305 struct access *racc;
538dd0b7 3306 gassign *stmt;
0674b9d0
MJ
3307 tree rhs;
3308
28151221 3309 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
0674b9d0
MJ
3310 if (racc && racc->grp_to_be_replaced)
3311 {
3312 rhs = get_access_replacement (racc);
3313 if (!useless_type_conversion_p (lacc->type, racc->type))
28151221
MJ
3314 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3315 lacc->type, rhs);
3e44f600
MJ
3316
3317 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
28151221
MJ
3318 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3319 NULL_TREE, true, GSI_SAME_STMT);
0674b9d0
MJ
3320 }
3321 else
3322 {
0674b9d0
MJ
3323 /* No suitable access on the right hand side, need to load from
3324 the aggregate. See if we have to update it first... */
28151221
MJ
3325 if (sad->refreshed == SRA_UDH_NONE)
3326 handle_unscalarized_data_in_subtree (sad);
fac52fdd 3327
28151221
MJ
3328 if (sad->refreshed == SRA_UDH_LEFT)
3329 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3330 lacc->offset - sad->left_offset,
3331 lacc, sad->new_gsi, true);
fac52fdd 3332 else
28151221
MJ
3333 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3334 lacc->offset - sad->left_offset,
3335 lacc, sad->new_gsi, true);
6a9ceb17 3336 if (lacc->grp_partial_lhs)
28151221
MJ
3337 rhs = force_gimple_operand_gsi (sad->new_gsi,
3338 rhs, true, NULL_TREE,
6a9ceb17 3339 false, GSI_NEW_STMT);
0674b9d0 3340 }
97e73bd2 3341
0674b9d0 3342 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
28151221
MJ
3343 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3344 gimple_set_location (stmt, sad->loc);
0674b9d0 3345 update_stmt (stmt);
2a45675f 3346 sra_stats.subreplacements++;
0674b9d0 3347 }
be384c10
MJ
3348 else
3349 {
28151221 3350 if (sad->refreshed == SRA_UDH_NONE
be384c10 3351 && lacc->grp_read && !lacc->grp_covered)
28151221
MJ
3352 handle_unscalarized_data_in_subtree (sad);
3353
be384c10
MJ
3354 if (lacc && lacc->grp_to_be_debug_replaced)
3355 {
538dd0b7 3356 gdebug *ds;
be384c10 3357 tree drhs;
28151221
MJ
3358 struct access *racc = find_access_in_subtree (sad->top_racc,
3359 offset,
be384c10
MJ
3360 lacc->size);
3361
3362 if (racc && racc->grp_to_be_replaced)
f8f42513 3363 {
88bed196 3364 if (racc->grp_write || constant_decl_p (racc->base))
f8f42513
MJ
3365 drhs = get_access_replacement (racc);
3366 else
3367 drhs = NULL;
3368 }
28151221
MJ
3369 else if (sad->refreshed == SRA_UDH_LEFT)
3370 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3371 lacc->offset, lacc);
3372 else if (sad->refreshed == SRA_UDH_RIGHT)
3373 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3374 offset, lacc);
be384c10
MJ
3375 else
3376 drhs = NULL_TREE;
8268ad5c
JJ
3377 if (drhs
3378 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
28151221 3379 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
8268ad5c 3380 lacc->type, drhs);
be384c10 3381 ds = gimple_build_debug_bind (get_access_replacement (lacc),
28151221
MJ
3382 drhs, gsi_stmt (sad->old_gsi));
3383 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
be384c10
MJ
3384 }
3385 }
0674b9d0
MJ
3386
3387 if (lacc->first_child)
28151221 3388 load_assign_lhs_subreplacements (lacc, sad);
6de9cd9a 3389 }
97e73bd2 3390}
6de9cd9a 3391
6cbd3b6a
MJ
3392/* Result code for SRA assignment modification. */
3393enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3394 SRA_AM_MODIFIED, /* stmt changed but not
3395 removed */
3396 SRA_AM_REMOVED }; /* stmt eliminated */
3397
0674b9d0
MJ
 3398/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT is a pointer to
 3399   the assignment statement and GSI is the statement iterator pointing at it.
 3400   Returns the same values as sra_modify_assign.  */
6de9cd9a 3401
6cbd3b6a 3402static enum assignment_mod_result
355fe088 3403sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
6de9cd9a 3404{
56386ab9 3405 tree lhs = gimple_assign_lhs (stmt);
9ce85efc 3406 struct access *acc = get_access_for_expr (lhs);
0674b9d0 3407 if (!acc)
6cbd3b6a 3408 return SRA_AM_NONE;
9ce85efc 3409 location_t loc = gimple_location (stmt);
6de9cd9a 3410
56386ab9 3411 if (gimple_clobber_p (stmt))
13604927 3412 {
9ce85efc
MG
3413 /* Clobber the replacement variable. */
3414 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3415 /* Remove clobbers of fully scalarized variables, they are dead. */
13604927
RG
3416 if (acc->grp_covered)
3417 {
56386ab9 3418 unlink_stmt_vdef (stmt);
13604927 3419 gsi_remove (gsi, true);
56386ab9 3420 release_defs (stmt);
13604927
RG
3421 return SRA_AM_REMOVED;
3422 }
3423 else
9ce85efc 3424 return SRA_AM_MODIFIED;
13604927
RG
3425 }
3426
aaa1b10f 3427 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
400196f1 3428 {
0674b9d0
MJ
3429 /* I have never seen this code path trigger but if it can happen the
3430 following should handle it gracefully. */
3431 if (access_has_children_p (acc))
28151221 3432 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
e4b5cace 3433 true, true, loc);
6cbd3b6a 3434 return SRA_AM_MODIFIED;
400196f1 3435 }
6de9cd9a 3436
0674b9d0 3437 if (acc->grp_covered)
97e73bd2 3438 {
e4b5cace 3439 init_subtree_with_zero (acc, gsi, false, loc);
56386ab9 3440 unlink_stmt_vdef (stmt);
0674b9d0 3441 gsi_remove (gsi, true);
56386ab9 3442 release_defs (stmt);
6cbd3b6a 3443 return SRA_AM_REMOVED;
97e73bd2
RH
3444 }
3445 else
3446 {
e4b5cace 3447 init_subtree_with_zero (acc, gsi, true, loc);
6cbd3b6a 3448 return SRA_AM_MODIFIED;
6de9cd9a
DN
3449 }
3450}
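/* Illustrative example (not from the original sources): for a fully
   scalarized aggregate, a statement 's = {};' is replaced by statements
   zeroing all scalar replacements, e.g. 'SR.i = 0; SR.f = 0.0;', and the
   original store is removed (SRA_AM_REMOVED).  If parts of 's' are not
   covered by replacements, the zeroing statements are emitted alongside the
   original statement, which is kept to initialize the rest
   (SRA_AM_MODIFIED).  */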
3451
56a42add
MJ
3452/* Create and return a new suitable default definition SSA_NAME for RACC which
3453 is an access describing an uninitialized part of an aggregate that is being
3454 loaded. */
0f2ffb9a 3455
56a42add
MJ
3456static tree
3457get_repl_default_def_ssa_name (struct access *racc)
0f2ffb9a 3458{
5d751b0c
JJ
3459 gcc_checking_assert (!racc->grp_to_be_replaced
3460 && !racc->grp_to_be_debug_replaced);
b48b3fc4
MJ
3461 if (!racc->replacement_decl)
3462 racc->replacement_decl = create_access_replacement (racc);
3463 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
0f2ffb9a 3464}
6de9cd9a 3465
6cbd3b6a
MJ
3466/* Examine both sides of the assignment statement pointed to by STMT, replace
 3467   them with a scalar replacement if there is one and generate copying of
3468 replacements if scalarized aggregates have been used in the assignment. GSI
3469 is used to hold generated statements for type conversions and subtree
0674b9d0
MJ
3470 copying. */
3471
6cbd3b6a 3472static enum assignment_mod_result
355fe088 3473sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
6de9cd9a 3474{
0674b9d0
MJ
3475 struct access *lacc, *racc;
3476 tree lhs, rhs;
3477 bool modify_this_stmt = false;
3478 bool force_gimple_rhs = false;
e4b5cace 3479 location_t loc;
002cda0a 3480 gimple_stmt_iterator orig_gsi = *gsi;
6de9cd9a 3481
56386ab9 3482 if (!gimple_assign_single_p (stmt))
6cbd3b6a 3483 return SRA_AM_NONE;
56386ab9
MJ
3484 lhs = gimple_assign_lhs (stmt);
3485 rhs = gimple_assign_rhs1 (stmt);
6de9cd9a 3486
0674b9d0
MJ
3487 if (TREE_CODE (rhs) == CONSTRUCTOR)
3488 return sra_modify_constructor_assign (stmt, gsi);
6de9cd9a 3489
0674b9d0
MJ
3490 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3491 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3492 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3493 {
56386ab9 3494 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
6cbd3b6a 3495 gsi, false);
56386ab9 3496 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
6cbd3b6a
MJ
3497 gsi, true);
3498 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
0674b9d0 3499 }
6de9cd9a 3500
0674b9d0
MJ
3501 lacc = get_access_for_expr (lhs);
3502 racc = get_access_for_expr (rhs);
3503 if (!lacc && !racc)
6cbd3b6a 3504 return SRA_AM_NONE;
88bed196
AL
3505 /* Avoid modifying initializations of constant-pool replacements. */
3506 if (racc && (racc->replacement_decl == lhs))
3507 return SRA_AM_NONE;
6de9cd9a 3508
56386ab9 3509 loc = gimple_location (stmt);
0674b9d0 3510 if (lacc && lacc->grp_to_be_replaced)
97e73bd2 3511 {
0674b9d0 3512 lhs = get_access_replacement (lacc);
56386ab9 3513 gimple_assign_set_lhs (stmt, lhs);
0674b9d0
MJ
3514 modify_this_stmt = true;
3515 if (lacc->grp_partial_lhs)
3516 force_gimple_rhs = true;
2a45675f 3517 sra_stats.exprs++;
97e73bd2 3518 }
6de9cd9a 3519
0674b9d0
MJ
3520 if (racc && racc->grp_to_be_replaced)
3521 {
3522 rhs = get_access_replacement (racc);
3523 modify_this_stmt = true;
3524 if (racc->grp_partial_lhs)
3525 force_gimple_rhs = true;
2a45675f 3526 sra_stats.exprs++;
0674b9d0 3527 }
fdad69c1 3528 else if (racc
fdad69c1 3529 && !racc->grp_unscalarized_data
20c5e09e 3530 && !racc->grp_unscalarizable_region
973a39ae
RG
3531 && TREE_CODE (lhs) == SSA_NAME
3532 && !access_has_replacements_p (racc))
fdad69c1
RG
3533 {
3534 rhs = get_repl_default_def_ssa_name (racc);
3535 modify_this_stmt = true;
3536 sra_stats.exprs++;
3537 }
6de9cd9a 3538
0674b9d0
MJ
3539 if (modify_this_stmt)
3540 {
3541 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
6de9cd9a 3542 {
0674b9d0
MJ
3543 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3544 ??? This should move to fold_stmt which we simply should
3545 call after building a VIEW_CONVERT_EXPR here. */
3546 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
7d2fb524 3547 && !contains_bitfld_component_ref_p (lhs))
0674b9d0 3548 {
e80b21ed 3549 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
56386ab9 3550 gimple_assign_set_lhs (stmt, lhs);
0674b9d0
MJ
3551 }
3552 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
87eab554 3553 && !contains_vce_or_bfcref_p (rhs))
e80b21ed 3554 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
d242d063 3555
0674b9d0 3556 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
0ec19b8c 3557 {
d242d063
MJ
3558 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3559 rhs);
1bea3098
RG
3560 if (is_gimple_reg_type (TREE_TYPE (lhs))
3561 && TREE_CODE (lhs) != SSA_NAME)
0ec19b8c
MJ
3562 force_gimple_rhs = true;
3563 }
0674b9d0 3564 }
0674b9d0 3565 }
97e73bd2 3566
be384c10
MJ
3567 if (lacc && lacc->grp_to_be_debug_replaced)
3568 {
a7818b54
JJ
3569 tree dlhs = get_access_replacement (lacc);
3570 tree drhs = unshare_expr (rhs);
3571 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3572 {
3573 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3574 && !contains_vce_or_bfcref_p (drhs))
3575 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3576 if (drhs
3577 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3578 TREE_TYPE (drhs)))
3579 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3580 TREE_TYPE (dlhs), drhs);
3581 }
538dd0b7 3582 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
be384c10
MJ
3583 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3584 }
3585
0674b9d0
MJ
 3586  /* From this point on, the function deals with assignments between
 3587     aggregates when at least one has scalar reductions of some of its
 3588     components.  There are three possible scenarios: 1) both the LHS and RHS
 3589     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3590
3591 In the first case, we would like to load the LHS components from RHS
3592 components whenever possible. If that is not possible, we would like to
3593 read it directly from the RHS (after updating it by storing in it its own
3594 components). If there are some necessary unscalarized data in the LHS,
3595 those will be loaded by the original assignment too. If neither of these
3596 cases happen, the original statement can be removed. Most of this is done
3597 by load_assign_lhs_subreplacements.
3598
3599 In the second case, we would like to store all RHS scalarized components
3600 directly into LHS and if they cover the aggregate completely, remove the
3601 statement too. In the third case, we want the LHS components to be loaded
3602 directly from the RHS (DSE will remove the original statement if it
3603 becomes redundant).
3604
3605 This is a bit complex but manageable when types match and when unions do
3606 not cause confusion in a way that we cannot really load a component of LHS
3607 from the RHS or vice versa (the access representing this level can have
3608 subaccesses that are accessible only through a different union field at a
3609 higher level - different from the one used in the examined expression).
3610 Unions are fun.
3611
3612 Therefore, I specially handle a fourth case, happening when there is a
3613 specific type cast or it is impossible to locate a scalarized subaccess on
3614 the other side of the expression. If that happens, I simply "refresh" the
 3615     RHS by storing in it its scalarized components, leave the original statement
 3616     there to do the copying and then load the scalar replacements of the LHS.
3617 This is what the first branch does. */
3618
b807e627 3619 if (modify_this_stmt
56386ab9 3620 || gimple_has_volatile_ops (stmt)
4cc13d9d 3621 || contains_vce_or_bfcref_p (rhs)
104cb50b 3622 || contains_vce_or_bfcref_p (lhs)
56386ab9 3623 || stmt_ends_bb_p (stmt))
0674b9d0 3624 {
88bed196
AL
3625 /* No need to copy into a constant-pool, it comes pre-initialized. */
3626 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
28151221 3627 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
e4b5cace 3628 gsi, false, false, loc);
0674b9d0 3629 if (access_has_children_p (lacc))
104cb50b
MJ
3630 {
3631 gimple_stmt_iterator alt_gsi = gsi_none ();
56386ab9 3632 if (stmt_ends_bb_p (stmt))
104cb50b
MJ
3633 {
3634 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3635 gsi = &alt_gsi;
3636 }
28151221 3637 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
104cb50b
MJ
3638 gsi, true, true, loc);
3639 }
2a45675f 3640 sra_stats.separate_lhs_rhs_handling++;
fdad69c1
RG
3641
3642 /* This gimplification must be done after generate_subtree_copies,
3643 lest we insert the subtree copies in the middle of the gimplified
3644 sequence. */
3645 if (force_gimple_rhs)
3646 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3647 true, GSI_SAME_STMT);
56386ab9 3648 if (gimple_assign_rhs1 (stmt) != rhs)
fdad69c1
RG
3649 {
3650 modify_this_stmt = true;
3651 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
56386ab9 3652 gcc_assert (stmt == gsi_stmt (orig_gsi));
fdad69c1
RG
3653 }
3654
3655 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
0674b9d0
MJ
3656 }
3657 else
3658 {
429576ac
MJ
3659 if (access_has_children_p (lacc)
3660 && access_has_children_p (racc)
3661 /* When an access represents an unscalarizable region, it usually
3662 represents accesses with variable offset and thus must not be used
3663 to generate new memory accesses. */
3664 && !lacc->grp_unscalarizable_region
3665 && !racc->grp_unscalarizable_region)
0674b9d0 3666 {
28151221
MJ
3667 struct subreplacement_assignment_data sad;
3668
3669 sad.left_offset = lacc->offset;
3670 sad.assignment_lhs = lhs;
3671 sad.assignment_rhs = rhs;
3672 sad.top_racc = racc;
3673 sad.old_gsi = *gsi;
3674 sad.new_gsi = gsi;
56386ab9 3675 sad.loc = gimple_location (stmt);
28151221 3676 sad.refreshed = SRA_UDH_NONE;
510335c8 3677
0674b9d0 3678 if (lacc->grp_read && !lacc->grp_covered)
28151221 3679 handle_unscalarized_data_in_subtree (&sad);
19114537 3680
28151221
MJ
3681 load_assign_lhs_subreplacements (lacc, &sad);
3682 if (sad.refreshed != SRA_UDH_RIGHT)
97e73bd2 3683 {
75a75e91 3684 gsi_next (gsi);
56386ab9 3685 unlink_stmt_vdef (stmt);
28151221 3686 gsi_remove (&sad.old_gsi, true);
56386ab9 3687 release_defs (stmt);
2a45675f 3688 sra_stats.deleted++;
6cbd3b6a 3689 return SRA_AM_REMOVED;
97e73bd2 3690 }
6de9cd9a 3691 }
97e73bd2 3692 else
0674b9d0 3693 {
fdad69c1 3694 if (access_has_children_p (racc)
ad2a2706
MJ
3695 && !racc->grp_unscalarized_data
3696 && TREE_CODE (lhs) != SSA_NAME)
0674b9d0 3697 {
fdad69c1 3698 if (dump_file)
0674b9d0 3699 {
fdad69c1 3700 fprintf (dump_file, "Removing load: ");
ef6cb4c7 3701 print_gimple_stmt (dump_file, stmt, 0);
0674b9d0 3702 }
fdad69c1
RG
3703 generate_subtree_copies (racc->first_child, lhs,
3704 racc->offset, 0, 0, gsi,
3705 false, false, loc);
56386ab9
MJ
3706 gcc_assert (stmt == gsi_stmt (*gsi));
3707 unlink_stmt_vdef (stmt);
fdad69c1 3708 gsi_remove (gsi, true);
56386ab9 3709 release_defs (stmt);
fdad69c1
RG
3710 sra_stats.deleted++;
3711 return SRA_AM_REMOVED;
0674b9d0 3712 }
63d7ceaa
RG
3713 /* Restore the aggregate RHS from its components so the
3714 prevailing aggregate copy does the right thing. */
fdad69c1 3715 if (access_has_children_p (racc))
28151221 3716 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
63d7ceaa
RG
3717 gsi, false, false, loc);
3718 /* Re-load the components of the aggregate copy destination.
3719 But use the RHS aggregate to load from to expose more
3720 optimization opportunities. */
0f2ffb9a 3721 if (access_has_children_p (lacc))
0674b9d0 3722 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
e4b5cace 3723 0, 0, gsi, true, true, loc);
0674b9d0 3724 }
002cda0a 3725
fdad69c1 3726 return SRA_AM_NONE;
002cda0a 3727 }
6cbd3b6a
MJ
3728}
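/* Illustrative summary (not from the original sources) of the common
   aggregate-copy case 'a = b' where both sides are partially scalarized:
   scalar replacements of 'b' are first stored back into 'b' if it contains
   unscalarized data, replacements of 'a' are then loaded either from
   matching replacements of 'b' or from the refreshed aggregate, and the
   original copy statement is removed unless data had to be flushed to the
   RHS, in which case it is kept to copy the remaining bits.  */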
3729
88bed196
AL
3730/* Set any scalar replacements of values in the constant pool to the initial
3731 value of the constant. (Constant-pool decls like *.LC0 have effectively
 3732   been initialized before the program starts, so we must do the same for their
3733 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3734 the function's entry block. */
3735
3736static void
3737initialize_constant_pool_replacements (void)
3738{
3739 gimple_seq seq = NULL;
3740 gimple_stmt_iterator gsi = gsi_start (seq);
3741 bitmap_iterator bi;
3742 unsigned i;
3743
3744 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
c91a0948
MJ
3745 {
3746 tree var = candidate (i);
3747 if (!constant_decl_p (var))
3748 continue;
3749 vec<access_p> *access_vec = get_base_access_vector (var);
3750 if (!access_vec)
3751 continue;
3752 for (unsigned i = 0; i < access_vec->length (); i++)
3753 {
3754 struct access *access = (*access_vec)[i];
3755 if (!access->replacement_decl)
3756 continue;
3757 gassign *stmt
3758 = gimple_build_assign (get_access_replacement (access),
3759 unshare_expr (access->expr));
3760 if (dump_file && (dump_flags & TDF_DETAILS))
3761 {
3762 fprintf (dump_file, "Generating constant initializer: ");
ef6cb4c7 3763 print_gimple_stmt (dump_file, stmt, 0);
c91a0948
MJ
3764 fprintf (dump_file, "\n");
3765 }
3766 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3767 update_stmt (stmt);
3768 }
3769 }
88bed196
AL
3770
3771 seq = gsi_seq (gsi);
3772 if (seq)
3773 gsi_insert_seq_on_edge_immediate (
3774 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3775}
3776
6cbd3b6a 3777/* Traverse the function body and make all modifications as decided in
8cbeddcc
MJ
3778 analyze_all_variable_accesses. Return true iff the CFG has been
3779 changed. */
6cbd3b6a 3780
8cbeddcc 3781static bool
6cbd3b6a
MJ
3782sra_modify_function_body (void)
3783{
8cbeddcc 3784 bool cfg_changed = false;
6cbd3b6a
MJ
3785 basic_block bb;
3786
88bed196
AL
3787 initialize_constant_pool_replacements ();
3788
11cd3bed 3789 FOR_EACH_BB_FN (bb, cfun)
6cbd3b6a
MJ
3790 {
3791 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3792 while (!gsi_end_p (gsi))
3793 {
355fe088 3794 gimple *stmt = gsi_stmt (gsi);
6cbd3b6a
MJ
3795 enum assignment_mod_result assign_result;
3796 bool modified = false, deleted = false;
3797 tree *t;
3798 unsigned i;
3799
3800 switch (gimple_code (stmt))
3801 {
3802 case GIMPLE_RETURN:
538dd0b7 3803 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
6cbd3b6a
MJ
3804 if (*t != NULL_TREE)
3805 modified |= sra_modify_expr (t, &gsi, false);
3806 break;
3807
3808 case GIMPLE_ASSIGN:
56386ab9 3809 assign_result = sra_modify_assign (stmt, &gsi);
6cbd3b6a
MJ
3810 modified |= assign_result == SRA_AM_MODIFIED;
3811 deleted = assign_result == SRA_AM_REMOVED;
3812 break;
3813
3814 case GIMPLE_CALL:
3815 /* Operands must be processed before the lhs. */
3816 for (i = 0; i < gimple_call_num_args (stmt); i++)
3817 {
3818 t = gimple_call_arg_ptr (stmt, i);
3819 modified |= sra_modify_expr (t, &gsi, false);
3820 }
3821
3822 if (gimple_call_lhs (stmt))
3823 {
3824 t = gimple_call_lhs_ptr (stmt);
3825 modified |= sra_modify_expr (t, &gsi, true);
3826 }
3827 break;
3828
3829 case GIMPLE_ASM:
538dd0b7
DM
3830 {
3831 gasm *asm_stmt = as_a <gasm *> (stmt);
3832 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3833 {
3834 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3835 modified |= sra_modify_expr (t, &gsi, false);
3836 }
3837 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3838 {
3839 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3840 modified |= sra_modify_expr (t, &gsi, true);
3841 }
3842 }
6cbd3b6a
MJ
3843 break;
3844
3845 default:
3846 break;
3847 }
3848
3849 if (modified)
3850 {
3851 update_stmt (stmt);
8cbeddcc
MJ
3852 if (maybe_clean_eh_stmt (stmt)
3853 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3854 cfg_changed = true;
6cbd3b6a
MJ
3855 }
3856 if (!deleted)
3857 gsi_next (&gsi);
3858 }
3859 }
8cbeddcc 3860
104cb50b 3861 gsi_commit_edge_inserts ();
8cbeddcc 3862 return cfg_changed;
6de9cd9a
DN
3863}
3864
0674b9d0
MJ
3865/* Generate statements initializing scalar replacements of parts of function
3866 parameters. */
6de9cd9a 3867
97e73bd2 3868static void
0674b9d0 3869initialize_parameter_reductions (void)
6de9cd9a 3870{
0674b9d0 3871 gimple_stmt_iterator gsi;
726a989a 3872 gimple_seq seq = NULL;
0674b9d0 3873 tree parm;
6de9cd9a 3874
355a7673 3875 gsi = gsi_start (seq);
0674b9d0
MJ
3876 for (parm = DECL_ARGUMENTS (current_function_decl);
3877 parm;
910ad8de 3878 parm = DECL_CHAIN (parm))
0bca51f0 3879 {
9771b263 3880 vec<access_p> *access_vec;
0674b9d0 3881 struct access *access;
97e73bd2 3882
0674b9d0
MJ
3883 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3884 continue;
3885 access_vec = get_base_access_vector (parm);
3886 if (!access_vec)
3887 continue;
6de9cd9a 3888
9771b263 3889 for (access = (*access_vec)[0];
0674b9d0
MJ
3890 access;
3891 access = access->next_grp)
e4b5cace
MJ
3892 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3893 EXPR_LOCATION (parm));
0674b9d0 3894 }
97e73bd2 3895
355a7673 3896 seq = gsi_seq (gsi);
0674b9d0 3897 if (seq)
fefa31b5 3898 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
97e73bd2 3899}
6de9cd9a 3900
0674b9d0
MJ
3901/* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3902 it reveals there are components of some aggregates to be scalarized, it runs
3903 the required transformations. */
3904static unsigned int
3905perform_intra_sra (void)
ea900239 3906{
0674b9d0
MJ
3907 int ret = 0;
3908 sra_initialize ();
ea900239 3909
0674b9d0
MJ
3910 if (!find_var_candidates ())
3911 goto out;
ea900239 3912
6cbd3b6a 3913 if (!scan_function ())
0674b9d0 3914 goto out;
726a989a 3915
0674b9d0
MJ
3916 if (!analyze_all_variable_accesses ())
3917 goto out;
6de9cd9a 3918
8cbeddcc
MJ
3919 if (sra_modify_function_body ())
3920 ret = TODO_update_ssa | TODO_cleanup_cfg;
3921 else
3922 ret = TODO_update_ssa;
0674b9d0 3923 initialize_parameter_reductions ();
2a45675f
MJ
3924
3925 statistics_counter_event (cfun, "Scalar replacements created",
3926 sra_stats.replacements);
3927 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3928 statistics_counter_event (cfun, "Subtree copy stmts",
3929 sra_stats.subtree_copies);
3930 statistics_counter_event (cfun, "Subreplacement stmts",
3931 sra_stats.subreplacements);
3932 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3933 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3934 sra_stats.separate_lhs_rhs_handling);
3935
0674b9d0
MJ
3936 out:
3937 sra_deinitialize ();
3938 return ret;
6de9cd9a
DN
3939}
3940
0674b9d0 3941/* Perform early intraprocedural SRA. */
029f45bd 3942static unsigned int
0674b9d0 3943early_intra_sra (void)
029f45bd 3944{
0674b9d0
MJ
3945 sra_mode = SRA_MODE_EARLY_INTRA;
3946 return perform_intra_sra ();
3947}
029f45bd 3948
0674b9d0
MJ
3949/* Perform "late" intraprocedural SRA. */
3950static unsigned int
3951late_intra_sra (void)
3952{
3953 sra_mode = SRA_MODE_INTRA;
3954 return perform_intra_sra ();
029f45bd
RH
3955}
3956
0674b9d0 3957
6de9cd9a 3958static bool
0674b9d0 3959gate_intra_sra (void)
6de9cd9a 3960{
567a4beb 3961 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
6de9cd9a
DN
3962}
3963
0674b9d0 3964
27a4cd48
DM
3965namespace {
3966
3967const pass_data pass_data_sra_early =
029f45bd 3968{
27a4cd48
DM
3969 GIMPLE_PASS, /* type */
3970 "esra", /* name */
3971 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3972 TV_TREE_SRA, /* tv_id */
3973 ( PROP_cfg | PROP_ssa ), /* properties_required */
3974 0, /* properties_provided */
3975 0, /* properties_destroyed */
3976 0, /* todo_flags_start */
3bea341f 3977 TODO_update_ssa, /* todo_flags_finish */
029f45bd
RH
3978};
3979
27a4cd48
DM
3980class pass_sra_early : public gimple_opt_pass
3981{
3982public:
c3284718
RS
3983 pass_sra_early (gcc::context *ctxt)
3984 : gimple_opt_pass (pass_data_sra_early, ctxt)
27a4cd48
DM
3985 {}
3986
3987 /* opt_pass methods: */
1a3d085c 3988 virtual bool gate (function *) { return gate_intra_sra (); }
be55bfe6 3989 virtual unsigned int execute (function *) { return early_intra_sra (); }
27a4cd48
DM
3990
3991}; // class pass_sra_early
3992
3993} // anon namespace
3994
3995gimple_opt_pass *
3996make_pass_sra_early (gcc::context *ctxt)
3997{
3998 return new pass_sra_early (ctxt);
3999}
4000
4001namespace {
4002
4003const pass_data pass_data_sra =
6de9cd9a 4004{
27a4cd48
DM
4005 GIMPLE_PASS, /* type */
4006 "sra", /* name */
4007 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
4008 TV_TREE_SRA, /* tv_id */
4009 ( PROP_cfg | PROP_ssa ), /* properties_required */
4010 0, /* properties_provided */
4011 0, /* properties_destroyed */
4012 TODO_update_address_taken, /* todo_flags_start */
3bea341f 4013 TODO_update_ssa, /* todo_flags_finish */
6de9cd9a 4014};
07ffa034 4015
27a4cd48
DM
4016class pass_sra : public gimple_opt_pass
4017{
4018public:
c3284718
RS
4019 pass_sra (gcc::context *ctxt)
4020 : gimple_opt_pass (pass_data_sra, ctxt)
27a4cd48
DM
4021 {}
4022
4023 /* opt_pass methods: */
1a3d085c 4024 virtual bool gate (function *) { return gate_intra_sra (); }
be55bfe6 4025 virtual unsigned int execute (function *) { return late_intra_sra (); }
27a4cd48
DM
4026
4027}; // class pass_sra
4028
4029} // anon namespace
4030
4031gimple_opt_pass *
4032make_pass_sra (gcc::context *ctxt)
4033{
4034 return new pass_sra (ctxt);
4035}
4036
07ffa034
MJ
4037
4038/* Return true iff PARM (which must be a parm_decl) is an unused scalar
4039 parameter. */
4040
4041static bool
4042is_unused_scalar_param (tree parm)
4043{
4044 tree name;
4045 return (is_gimple_reg (parm)
32244553 4046 && (!(name = ssa_default_def (cfun, parm))
07ffa034
MJ
4047 || has_zero_uses (name)));
4048}
4049
4050/* Scan immediate uses of a default definition SSA name of a parameter PARM and
4051 examine whether there are any direct or otherwise infeasible ones. If so,
4052 return true, otherwise return false. PARM must be a gimple register with a
4053 non-NULL default definition. */
4054
4055static bool
4056ptr_parm_has_direct_uses (tree parm)
4057{
4058 imm_use_iterator ui;
355fe088 4059 gimple *stmt;
32244553 4060 tree name = ssa_default_def (cfun, parm);
07ffa034
MJ
4061 bool ret = false;
4062
4063 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4064 {
44f89620
RG
4065 int uses_ok = 0;
4066 use_operand_p use_p;
4067
4068 if (is_gimple_debug (stmt))
4069 continue;
4070
4071 /* Valid uses include dereferences on the lhs and the rhs. */
4072 if (gimple_has_lhs (stmt))
07ffa034 4073 {
44f89620
RG
4074 tree lhs = gimple_get_lhs (stmt);
4075 while (handled_component_p (lhs))
4076 lhs = TREE_OPERAND (lhs, 0);
70f34814
RG
4077 if (TREE_CODE (lhs) == MEM_REF
4078 && TREE_OPERAND (lhs, 0) == name
4079 && integer_zerop (TREE_OPERAND (lhs, 1))
4080 && types_compatible_p (TREE_TYPE (lhs),
0de204de
AP
4081 TREE_TYPE (TREE_TYPE (name)))
4082 && !TREE_THIS_VOLATILE (lhs))
44f89620 4083 uses_ok++;
07ffa034 4084 }
44f89620 4085 if (gimple_assign_single_p (stmt))
07ffa034 4086 {
44f89620
RG
4087 tree rhs = gimple_assign_rhs1 (stmt);
4088 while (handled_component_p (rhs))
4089 rhs = TREE_OPERAND (rhs, 0);
70f34814
RG
4090 if (TREE_CODE (rhs) == MEM_REF
4091 && TREE_OPERAND (rhs, 0) == name
4092 && integer_zerop (TREE_OPERAND (rhs, 1))
4093 && types_compatible_p (TREE_TYPE (rhs),
0de204de
AP
4094 TREE_TYPE (TREE_TYPE (name)))
4095 && !TREE_THIS_VOLATILE (rhs))
44f89620 4096 uses_ok++;
07ffa034
MJ
4097 }
4098 else if (is_gimple_call (stmt))
4099 {
4100 unsigned i;
44f89620 4101 for (i = 0; i < gimple_call_num_args (stmt); ++i)
07ffa034
MJ
4102 {
4103 tree arg = gimple_call_arg (stmt, i);
44f89620
RG
4104 while (handled_component_p (arg))
4105 arg = TREE_OPERAND (arg, 0);
70f34814
RG
4106 if (TREE_CODE (arg) == MEM_REF
4107 && TREE_OPERAND (arg, 0) == name
4108 && integer_zerop (TREE_OPERAND (arg, 1))
4109 && types_compatible_p (TREE_TYPE (arg),
0de204de
AP
4110 TREE_TYPE (TREE_TYPE (name)))
4111 && !TREE_THIS_VOLATILE (arg))
44f89620 4112 uses_ok++;
07ffa034
MJ
4113 }
4114 }
44f89620
RG
4115
4116 /* If the number of valid uses does not match the number of
4117 uses in this stmt there is an unhandled use. */
4118 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4119 --uses_ok;
4120
4121 if (uses_ok != 0)
07ffa034
MJ
4122 ret = true;
4123
4124 if (ret)
4125 BREAK_FROM_IMM_USE_STMT (ui);
4126 }
4127
4128 return ret;
4129}
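/* Illustrative example: for 'void f (int *p)', a use such as '*p = 0' or
   'x = *p' is counted as a valid dereference, whereas copying the pointer
   itself ('q = p;') or passing 'p' to another call leaves uses_ok behind the
   number of uses in the statement, so the function returns true and the
   parameter cannot be converted to pass-by-value.  */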
4130
4131/* Identify candidates for reduction for IPA-SRA based on their type and mark
 4132   them in candidate_bitmap.  Note that these do not necessarily include
 4133   parameters which are unused and thus can be removed.  Return true iff any
4134 such candidate has been found. */
4135
4136static bool
4137find_param_candidates (void)
4138{
4139 tree parm;
4140 int count = 0;
4141 bool ret = false;
949cfd0a 4142 const char *msg;
07ffa034
MJ
4143
4144 for (parm = DECL_ARGUMENTS (current_function_decl);
4145 parm;
910ad8de 4146 parm = DECL_CHAIN (parm))
07ffa034 4147 {
1e9fb3de 4148 tree type = TREE_TYPE (parm);
4a8fb1a1 4149 tree_node **slot;
07ffa034
MJ
4150
4151 count++;
1e9fb3de 4152
07ffa034 4153 if (TREE_THIS_VOLATILE (parm)
1e9fb3de 4154 || TREE_ADDRESSABLE (parm)
a7752396 4155 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
07ffa034
MJ
4156 continue;
4157
4158 if (is_unused_scalar_param (parm))
4159 {
4160 ret = true;
4161 continue;
4162 }
4163
07ffa034
MJ
4164 if (POINTER_TYPE_P (type))
4165 {
4166 type = TREE_TYPE (type);
4167
4168 if (TREE_CODE (type) == FUNCTION_TYPE
4169 || TYPE_VOLATILE (type)
c2cf2f4a
EB
4170 || (TREE_CODE (type) == ARRAY_TYPE
4171 && TYPE_NONALIASED_COMPONENT (type))
07ffa034 4172 || !is_gimple_reg (parm)
1e9fb3de 4173 || is_va_list_type (type)
07ffa034
MJ
4174 || ptr_parm_has_direct_uses (parm))
4175 continue;
4176 }
4177 else if (!AGGREGATE_TYPE_P (type))
4178 continue;
4179
4180 if (!COMPLETE_TYPE_P (type)
cc269bb6 4181 || !tree_fits_uhwi_p (TYPE_SIZE (type))
ae7e9ddd 4182 || tree_to_uhwi (TYPE_SIZE (type)) == 0
07ffa034 4183 || (AGGREGATE_TYPE_P (type)
949cfd0a 4184 && type_internals_preclude_sra_p (type, &msg)))
07ffa034
MJ
4185 continue;
4186
4187 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
c203e8a7 4188 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4a8fb1a1 4189 *slot = parm;
d94b820b 4190
07ffa034
MJ
4191 ret = true;
4192 if (dump_file && (dump_flags & TDF_DETAILS))
4193 {
4194 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
ef6cb4c7 4195 print_generic_expr (dump_file, parm);
07ffa034
MJ
4196 fprintf (dump_file, "\n");
4197 }
4198 }
4199
4200 func_param_count = count;
4201 return ret;
4202}
4203
4204/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4205 maybe_modified. */
4206
4207static bool
4208mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4209 void *data)
4210{
4211 struct access *repr = (struct access *) data;
4212
4213 repr->grp_maybe_modified = 1;
4214 return true;
4215}
4216
4217/* Analyze what representatives (in linked lists accessible from
4218 REPRESENTATIVES) can be modified by side effects of statements in the
4219 current function. */
4220
4221static void
9771b263 4222analyze_modified_params (vec<access_p> representatives)
07ffa034
MJ
4223{
4224 int i;
4225
4226 for (i = 0; i < func_param_count; i++)
4227 {
2b93f88d 4228 struct access *repr;
07ffa034 4229
9771b263 4230 for (repr = representatives[i];
2b93f88d
MJ
4231 repr;
4232 repr = repr->next_grp)
07ffa034 4233 {
30a20e97
MJ
4234 struct access *access;
4235 bitmap visited;
4236 ao_ref ar;
2b93f88d
MJ
4237
4238 if (no_accesses_p (repr))
4239 continue;
30a20e97 4240 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
2b93f88d
MJ
4241 || repr->grp_maybe_modified)
4242 continue;
4243
30a20e97
MJ
4244 ao_ref_init (&ar, repr->expr);
4245 visited = BITMAP_ALLOC (NULL);
4246 for (access = repr; access; access = access->next_sibling)
2b93f88d 4247 {
2b93f88d
MJ
4248 /* All accesses are read ones, otherwise grp_maybe_modified would
4249 be trivially set. */
2b93f88d 4250 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
30a20e97 4251 mark_maybe_modified, repr, &visited);
2b93f88d
MJ
4252 if (repr->grp_maybe_modified)
4253 break;
4254 }
30a20e97 4255 BITMAP_FREE (visited);
07ffa034
MJ
4256 }
4257 }
4258}
4259
4260/* Propagate distances in bb_dereferences in the opposite direction than the
4261 control flow edges, in each step storing the maximum of the current value
4262 and the minimum of all successors. These steps are repeated until the table
4263 stabilizes. Note that BBs which might terminate the functions (according to
4264 final_bbs bitmap) never updated in this way. */
4265
4266static void
4267propagate_dereference_distances (void)
4268{
07ffa034
MJ
4269 basic_block bb;
4270
3986e690 4271 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
fefa31b5 4272 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
11cd3bed 4273 FOR_EACH_BB_FN (bb, cfun)
07ffa034 4274 {
9771b263 4275 queue.quick_push (bb);
07ffa034
MJ
4276 bb->aux = bb;
4277 }
4278
9771b263 4279 while (!queue.is_empty ())
07ffa034
MJ
4280 {
4281 edge_iterator ei;
4282 edge e;
4283 bool change = false;
4284 int i;
4285
9771b263 4286 bb = queue.pop ();
07ffa034
MJ
4287 bb->aux = NULL;
4288
4289 if (bitmap_bit_p (final_bbs, bb->index))
4290 continue;
4291
4292 for (i = 0; i < func_param_count; i++)
4293 {
4294 int idx = bb->index * func_param_count + i;
4295 bool first = true;
4296 HOST_WIDE_INT inh = 0;
4297
4298 FOR_EACH_EDGE (e, ei, bb->succs)
4299 {
4300 int succ_idx = e->dest->index * func_param_count + i;
4301
fefa31b5 4302 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
07ffa034
MJ
4303 continue;
4304
4305 if (first)
4306 {
4307 first = false;
4308 inh = bb_dereferences [succ_idx];
4309 }
4310 else if (bb_dereferences [succ_idx] < inh)
4311 inh = bb_dereferences [succ_idx];
4312 }
4313
4314 if (!first && bb_dereferences[idx] < inh)
4315 {
4316 bb_dereferences[idx] = inh;
4317 change = true;
4318 }
4319 }
4320
4321 if (change && !bitmap_bit_p (final_bbs, bb->index))
4322 FOR_EACH_EDGE (e, ei, bb->preds)
4323 {
4324 if (e->src->aux)
4325 continue;
4326
4327 e->src->aux = e->src;
9771b263 4328 queue.quick_push (e->src);
07ffa034
MJ
4329 }
4330 }
07ffa034
MJ
4331}
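/* Small worked example (illustrative): with blocks B1 -> {B2, B3} where a
   parameter is dereferenced up to offset + size 64 in B2 but only up to 32
   in B3, B1 inherits min (64, 32) = 32; only the part certainly dereferenced
   on every outgoing path may be assumed dereferenced at B1.  Taking the
   maximum with B1's own entry preserves dereferences that happen in B1
   itself.  */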
4332
4333/* Dump a dereferences TABLE with heading STR to file F. */
4334
4335static void
4336dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4337{
4338 basic_block bb;
4339
81018dcf 4340 fprintf (dump_file, "%s", str);
fefa31b5
DM
4341 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4342 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
07ffa034
MJ
4343 {
4344 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
fefa31b5 4345 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
07ffa034
MJ
4346 {
4347 int i;
4348 for (i = 0; i < func_param_count; i++)
4349 {
4350 int idx = bb->index * func_param_count + i;
4351 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4352 }
4353 }
4354 fprintf (f, "\n");
4355 }
4356 fprintf (dump_file, "\n");
4357}
4358
 4359/* Determine which (parts of) parameters passed by reference and not assigned
 4360   to are not certainly dereferenced in this function, and thus for which the
 4361   dereferencing cannot be safely moved to the caller without potentially
 4362   introducing a segfault.  Mark such REPRESENTATIVES as
 4363   grp_not_necessarilly_dereferenced.
4364
 4365   The maximum dereferenced "distance," i.e. the offset + size of the accessed
 4366   part, is calculated for each pointer parameter rather than a simple
 4367   boolean, in order to handle cases when only a fraction of the whole
 4368   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
 4369   an example).
4370
 4371   The maximum dereference distances for each pointer parameter and BB are
 4372   already stored in bb_dereferences.  This routine simply propagates these
4373 values upwards by propagate_dereference_distances and then compares the
4374 distances of individual parameters in the ENTRY BB to the equivalent
4375 distances of each representative of a (fraction of a) parameter. */
4376
4377static void
9771b263 4378analyze_caller_dereference_legality (vec<access_p> representatives)
07ffa034
MJ
4379{
4380 int i;
4381
4382 if (dump_file && (dump_flags & TDF_DETAILS))
4383 dump_dereferences_table (dump_file,
4384 "Dereference table before propagation:\n",
4385 bb_dereferences);
4386
4387 propagate_dereference_distances ();
4388
4389 if (dump_file && (dump_flags & TDF_DETAILS))
4390 dump_dereferences_table (dump_file,
4391 "Dereference table after propagation:\n",
4392 bb_dereferences);
4393
4394 for (i = 0; i < func_param_count; i++)
4395 {
9771b263 4396 struct access *repr = representatives[i];
fefa31b5 4397 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
07ffa034
MJ
4398
4399 if (!repr || no_accesses_p (repr))
4400 continue;
4401
4402 do
4403 {
4404 if ((repr->offset + repr->size) > bb_dereferences[idx])
4405 repr->grp_not_necessarilly_dereferenced = 1;
4406 repr = repr->next_grp;
4407 }
4408 while (repr);
4409 }
4410}
4411
4412/* Return the representative access for the parameter declaration PARM if it is
4413 a scalar passed by reference which is not written to and the pointer value
4414 is not used directly. Thus, if it is legal to dereference it in the caller
 4415   and we can rule out modifications through aliases, such a parameter should be
4416 turned into one passed by value. Return NULL otherwise. */
4417
4418static struct access *
4419unmodified_by_ref_scalar_representative (tree parm)
4420{
4421 int i, access_count;
30a20e97 4422 struct access *repr;
9771b263 4423 vec<access_p> *access_vec;
07ffa034
MJ
4424
4425 access_vec = get_base_access_vector (parm);
4426 gcc_assert (access_vec);
9771b263 4427 repr = (*access_vec)[0];
30a20e97
MJ
4428 if (repr->write)
4429 return NULL;
4430 repr->group_representative = repr;
07ffa034 4431
9771b263 4432 access_count = access_vec->length ();
30a20e97 4433 for (i = 1; i < access_count; i++)
07ffa034 4434 {
9771b263 4435 struct access *access = (*access_vec)[i];
07ffa034
MJ
4436 if (access->write)
4437 return NULL;
30a20e97
MJ
4438 access->group_representative = repr;
4439 access->next_sibling = repr->next_sibling;
4440 repr->next_sibling = access;
07ffa034
MJ
4441 }
4442
30a20e97
MJ
4443 repr->grp_read = 1;
4444 repr->grp_scalar_ptr = 1;
4445 return repr;
07ffa034
MJ
4446}
4447
c1ed6a01
MJ
4448/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4449 associated with. REQ_ALIGN is the minimum required alignment. */
c6a2c25d
MJ
4450
4451static bool
c1ed6a01 4452access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
c6a2c25d 4453{
c1ed6a01 4454 unsigned int exp_align;
c6a2c25d
MJ
4455 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4456 is incompatible assign in a call statement (and possibly even in asm
4457 statements). This can be relaxed by using a new temporary but only for
4458 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4459 intraprocedural SRA we deal with this by keeping the old aggregate around,
4460 something we cannot do in IPA-SRA.) */
4461 if (access->write
4462 && (is_gimple_call (access->stmt)
4463 || gimple_code (access->stmt) == GIMPLE_ASM))
4464 return true;
4465
c1ed6a01
MJ
4466 exp_align = get_object_alignment (access->expr);
4467 if (exp_align < req_align)
4468 return true;
4469
c6a2c25d
MJ
4470 return false;
4471}
4472
4473
07ffa034
MJ
4474/* Sort collected accesses for parameter PARM, identify representatives for
 4475   each accessed region and link them together.  Return NULL if there are
 4476   different but overlapping accesses, the special pointer value meaning there
 4477   are no accesses for this parameter if that is the case, and the first
 4478   representative otherwise.  Set *RO_GRP if there is a group of accesses
4479 with only read (i.e. no write) accesses. */
4480
4481static struct access *
4482splice_param_accesses (tree parm, bool *ro_grp)
4483{
4484 int i, j, access_count, group_count;
be025a2d 4485 int total_size = 0;
07ffa034 4486 struct access *access, *res, **prev_acc_ptr = &res;
9771b263 4487 vec<access_p> *access_vec;
07ffa034
MJ
4488
4489 access_vec = get_base_access_vector (parm);
4490 if (!access_vec)
4491 return &no_accesses_representant;
9771b263 4492 access_count = access_vec->length ();
07ffa034 4493
9771b263 4494 access_vec->qsort (compare_access_positions);
07ffa034
MJ
4495
4496 i = 0;
4497 total_size = 0;
4498 group_count = 0;
4499 while (i < access_count)
4500 {
4501 bool modification;
82d49829 4502 tree a1_alias_type;
9771b263 4503 access = (*access_vec)[i];
07ffa034 4504 modification = access->write;
c1ed6a01 4505 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
c6a2c25d 4506 return NULL;
82d49829 4507 a1_alias_type = reference_alias_ptr_type (access->expr);
07ffa034
MJ
4508
4509 /* Access is about to become group representative unless we find some
4510 nasty overlap which would preclude us from breaking this parameter
4511 apart. */
4512
4513 j = i + 1;
4514 while (j < access_count)
4515 {
9771b263 4516 struct access *ac2 = (*access_vec)[j];
07ffa034
MJ
4517 if (ac2->offset != access->offset)
4518 {
4519 /* All or nothing law for parameters. */
4520 if (access->offset + access->size > ac2->offset)
4521 return NULL;
4522 else
4523 break;
4524 }
4525 else if (ac2->size != access->size)
4526 return NULL;
4527
c1ed6a01 4528 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
363e01cc
MJ
4529 || (ac2->type != access->type
4530 && (TREE_ADDRESSABLE (ac2->type)
82d49829
MJ
4531 || TREE_ADDRESSABLE (access->type)))
4532 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
c6a2c25d
MJ
4533 return NULL;
4534
07ffa034 4535 modification |= ac2->write;
30a20e97
MJ
4536 ac2->group_representative = access;
4537 ac2->next_sibling = access->next_sibling;
4538 access->next_sibling = ac2;
07ffa034
MJ
4539 j++;
4540 }
4541
4542 group_count++;
4543 access->grp_maybe_modified = modification;
4544 if (!modification)
4545 *ro_grp = true;
4546 *prev_acc_ptr = access;
4547 prev_acc_ptr = &access->next_grp;
4548 total_size += access->size;
4549 i = j;
4550 }
4551
07ffa034
MJ
4552 gcc_assert (group_count > 0);
4553 return res;
4554}
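/* Illustrative example: two reads of the same 32 bits of a parameter are
   spliced into a single group with one representative; a read of bits 0-31
   combined with a read of bits 16-47, however, overlaps only partially, so
   the all-or-nothing rule above rejects the whole parameter by returning
   NULL.  */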
4555
4556/* Decide whether parameters with representative accesses given by REPR should
4557 be reduced into components. */
4558
4559static int
4560decide_one_param_reduction (struct access *repr)
4561{
be025a2d 4562 HOST_WIDE_INT total_size, cur_parm_size;
07ffa034
MJ
4563 bool by_ref;
4564 tree parm;
4565
4566 parm = repr->base;
ae7e9ddd 4567 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
07ffa034
MJ
4568 gcc_assert (cur_parm_size > 0);
4569
4570 if (POINTER_TYPE_P (TREE_TYPE (parm)))
be025a2d 4571 by_ref = true;
07ffa034 4572 else
be025a2d 4573 by_ref = false;
07ffa034
MJ
4574
4575 if (dump_file)
4576 {
4577 struct access *acc;
4578 fprintf (dump_file, "Evaluating PARAM group sizes for ");
ef6cb4c7 4579 print_generic_expr (dump_file, parm);
07ffa034
MJ
4580 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4581 for (acc = repr; acc; acc = acc->next_grp)
4582 dump_access (dump_file, acc, true);
4583 }
4584
4585 total_size = 0;
be025a2d 4586 int new_param_count = 0;
07ffa034
MJ
4587
4588 for (; repr; repr = repr->next_grp)
4589 {
4590 gcc_assert (parm == repr->base);
5e9fba51
EB
4591
4592 /* Taking the address of a non-addressable field is verboten. */
4593 if (by_ref && repr->non_addressable)
4594 return 0;
07ffa034 4595
191879f9
RG
4596 /* Do not decompose a non-BLKmode param in a way that would
4597 create BLKmode params. Especially for by-reference passing
4598 (thus, pointer-type param) this is hardly worthwhile. */
4599 if (DECL_MODE (parm) != BLKmode
4600 && TYPE_MODE (repr->type) == BLKmode)
4601 return 0;
4602
07ffa034
MJ
4603 if (!by_ref || (!repr->grp_maybe_modified
4604 && !repr->grp_not_necessarilly_dereferenced))
4605 total_size += repr->size;
4606 else
4607 total_size += cur_parm_size;
5e9fba51
EB
4608
4609 new_param_count++;
07ffa034
MJ
4610 }
4611
4612 gcc_assert (new_param_count > 0);
4613
be025a2d 4614 if (!by_ref)
07ffa034 4615 {
be025a2d
MJ
4616 if (total_size >= cur_parm_size)
4617 return 0;
07ffa034
MJ
4618 }
4619 else
be025a2d
MJ
4620 {
4621 int parm_num_limit;
4622 if (optimize_function_for_size_p (cfun))
4623 parm_num_limit = 1;
4624 else
4625 parm_num_limit = PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR);
4626
4627 if (new_param_count > parm_num_limit
4628 || total_size > (parm_num_limit * cur_parm_size))
4629 return 0;
4630 }
4631
4632 if (dump_file)
4633 fprintf (dump_file, " ....will be split into %i components\n",
4634 new_param_count);
4635 return new_param_count;
07ffa034
MJ
4636}
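/* Illustrative arithmetic (assuming the default value 2 of
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR): a by-reference parameter (a 64-bit
   pointer) whose pointee is accessed as two 32-bit components has
   new_param_count 2 and total_size 64, which passes both limits
   (2 <= 2 and 64 <= 2 * 64) and is accepted; when optimizing for size the
   limit drops to a single replacement, so the same split would be
   rejected.  */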
4637
 4638/* The order of the following enums is important; we need to do extra work for
4639 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4640enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4641 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4642
4643/* Identify representatives of all accesses to all candidate parameters for
4644 IPA-SRA. Return result based on what representatives have been found. */
4645
4646static enum ipa_splicing_result
9771b263 4647splice_all_param_accesses (vec<access_p> &representatives)
07ffa034
MJ
4648{
4649 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4650 tree parm;
4651 struct access *repr;
4652
9771b263 4653 representatives.create (func_param_count);
07ffa034
MJ
4654
4655 for (parm = DECL_ARGUMENTS (current_function_decl);
4656 parm;
910ad8de 4657 parm = DECL_CHAIN (parm))
07ffa034
MJ
4658 {
4659 if (is_unused_scalar_param (parm))
4660 {
9771b263 4661 representatives.quick_push (&no_accesses_representant);
07ffa034
MJ
4662 if (result == NO_GOOD_ACCESS)
4663 result = UNUSED_PARAMS;
4664 }
4665 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4666 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4667 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4668 {
4669 repr = unmodified_by_ref_scalar_representative (parm);
9771b263 4670 representatives.quick_push (repr);
07ffa034
MJ
4671 if (repr)
4672 result = UNMODIF_BY_REF_ACCESSES;
4673 }
4674 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4675 {
4676 bool ro_grp = false;
4677 repr = splice_param_accesses (parm, &ro_grp);
9771b263 4678 representatives.quick_push (repr);
07ffa034
MJ
4679
4680 if (repr && !no_accesses_p (repr))
4681 {
4682 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4683 {
4684 if (ro_grp)
4685 result = UNMODIF_BY_REF_ACCESSES;
4686 else if (result < MODIF_BY_REF_ACCESSES)
4687 result = MODIF_BY_REF_ACCESSES;
4688 }
4689 else if (result < BY_VAL_ACCESSES)
4690 result = BY_VAL_ACCESSES;
4691 }
4692 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4693 result = UNUSED_PARAMS;
4694 }
4695 else
9771b263 4696 representatives.quick_push (NULL);
07ffa034
MJ
4697 }
4698
4699 if (result == NO_GOOD_ACCESS)
4700 {
9771b263 4701 representatives.release ();
07ffa034
MJ
4702 return NO_GOOD_ACCESS;
4703 }
4704
4705 return result;
4706}
4707
4708/* Return the index of BASE in PARMS. Abort if it is not found. */
4709
4710static inline int
9771b263 4711get_param_index (tree base, vec<tree> parms)
07ffa034
MJ
4712{
4713 int i, len;
4714
9771b263 4715 len = parms.length ();
07ffa034 4716 for (i = 0; i < len; i++)
9771b263 4717 if (parms[i] == base)
07ffa034
MJ
4718 return i;
4719 gcc_unreachable ();
4720}
4721
4722/* Convert the decisions made at the representative level into compact
4723 parameter adjustments. REPRESENTATIVES are pointers to first
4724 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
4725 final number of adjustments. */
4726
4727static ipa_parm_adjustment_vec
9771b263 4728turn_representatives_into_adjustments (vec<access_p> representatives,
07ffa034
MJ
4729 int adjustments_count)
4730{
9771b263 4731 vec<tree> parms;
07ffa034
MJ
4732 ipa_parm_adjustment_vec adjustments;
4733 tree parm;
4734 int i;
4735
4736 gcc_assert (adjustments_count > 0);
4737 parms = ipa_get_vector_of_formal_parms (current_function_decl);
9771b263 4738 adjustments.create (adjustments_count);
07ffa034 4739 parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 4740 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
07ffa034 4741 {
9771b263 4742 struct access *repr = representatives[i];
07ffa034
MJ
4743
4744 if (!repr || no_accesses_p (repr))
4745 {
f32682ca 4746 struct ipa_parm_adjustment adj;
07ffa034 4747
f32682ca
DN
4748 memset (&adj, 0, sizeof (adj));
4749 adj.base_index = get_param_index (parm, parms);
4750 adj.base = parm;
07ffa034 4751 if (!repr)
31519c38 4752 adj.op = IPA_PARM_OP_COPY;
07ffa034 4753 else
31519c38
AH
4754 adj.op = IPA_PARM_OP_REMOVE;
4755 adj.arg_prefix = "ISRA";
9771b263 4756 adjustments.quick_push (adj);
07ffa034
MJ
4757 }
4758 else
4759 {
f32682ca 4760 struct ipa_parm_adjustment adj;
07ffa034
MJ
4761 int index = get_param_index (parm, parms);
4762
4763 for (; repr; repr = repr->next_grp)
4764 {
f32682ca 4765 memset (&adj, 0, sizeof (adj));
07ffa034 4766 gcc_assert (repr->base == parm);
f32682ca
DN
4767 adj.base_index = index;
4768 adj.base = repr->base;
4769 adj.type = repr->type;
4770 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4771 adj.offset = repr->offset;
ee45a32d 4772 adj.reverse = repr->reverse;
f32682ca
DN
4773 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4774 && (repr->grp_maybe_modified
4775 || repr->grp_not_necessarilly_dereferenced));
31519c38 4776 adj.arg_prefix = "ISRA";
9771b263 4777 adjustments.quick_push (adj);
07ffa034
MJ
4778 }
4779 }
4780 }
9771b263 4781 parms.release ();
07ffa034
MJ
4782 return adjustments;
4783}

/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_accesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		representatives[i] = NULL;
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  representatives[i] = NULL;
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
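
/* Example of the grp_scalar_ptr decision above (an illustrative sketch,
   not from the original sources): for

     static int f (int *p) { return *p; }

   once analyze_modified_params and analyze_caller_dereference_legality
   establish that *P cannot change behind the callee's back and that every
   caller may safely dereference P, the parameter is converted from a
   pointer into a by-value scalar, counted in scalar_by_ref_to_by_val:

     static int f.isra.N (int p_val) { return p_val; }  */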

/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      DECL_NAMELESS (repl) = 1;
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not an IPA_PARM_OP_COPY.  Return NULL if there is no
   such adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
	return adj;
    }

  return NULL;
}

/* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
   parameter which is to be removed because its value is not used, create a new
   SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
   original with it and return it.  If there is no need to re-map, return NULL.
   ADJUSTMENTS is a vector of IPA-SRA adjustments.  */

static tree
replace_removed_params_ssa_names (tree old_name, gimple *stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree decl, repl, new_name;

  if (TREE_CODE (old_name) != SSA_NAME)
    return NULL;

  decl = SSA_NAME_VAR (old_name);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return NULL;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return NULL;

  repl = get_replaced_param_substitute (adj);
  new_name = make_ssa_name (repl, stmt);
  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
    = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, old_name);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, new_name);
      fprintf (dump_file, "\n");
    }

  replace_uses_by (old_name, new_name);
  return new_name;
}
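
/* Illustrative sketch (editorial, not from the original sources): if a
   parameter P is being removed, a definition such as

     p_3 = PHI <p_1(D), p_5>

   has its result re-mapped onto the fresh variable produced by
   get_replaced_param_substitute, e.g. ISR.8, yielding roughly

     ISR.8_3 = PHI <...>

   after which replace_uses_by rewrites every use of the old name.  */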
4957
ea3e455a
MJ
4958/* If the statement STMT contains any expressions that need to replaced with a
4959 different one as noted by ADJUSTMENTS, do so. Handle any potential type
4960 incompatibilities (GSI is used to accommodate conversion statements and must
4961 point to the statement). Return true iff the statement was modified. */
07ffa034 4962
6cbd3b6a 4963static bool
355fe088 4964sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
6cbd3b6a 4965 ipa_parm_adjustment_vec adjustments)
07ffa034 4966{
c6a2c25d
MJ
4967 tree *lhs_p, *rhs_p;
4968 bool any;
07ffa034
MJ
4969
4970 if (!gimple_assign_single_p (stmt))
6cbd3b6a 4971 return false;
07ffa034 4972
c6a2c25d
MJ
4973 rhs_p = gimple_assign_rhs1_ptr (stmt);
4974 lhs_p = gimple_assign_lhs_ptr (stmt);
4975
31519c38
AH
4976 any = ipa_modify_expr (rhs_p, false, adjustments);
4977 any |= ipa_modify_expr (lhs_p, false, adjustments);
c6a2c25d
MJ
4978 if (any)
4979 {
d557591d
MJ
4980 tree new_rhs = NULL_TREE;
4981
c6a2c25d 4982 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
92e97cdd
MJ
4983 {
4984 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4985 {
4986 /* V_C_Es of constructors can cause trouble (PR 42714). */
4987 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
e8160c9a 4988 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
92e97cdd 4989 else
9771b263
DN
4990 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4991 NULL);
92e97cdd
MJ
4992 }
4993 else
4994 new_rhs = fold_build1_loc (gimple_location (stmt),
4995 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4996 *rhs_p);
4997 }
d557591d
MJ
4998 else if (REFERENCE_CLASS_P (*rhs_p)
4999 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
5000 && !is_gimple_reg (*lhs_p))
5001 /* This can happen when an assignment in between two single field
5002 structures is turned into an assignment in between two pointers to
5003 scalars (PR 42237). */
5004 new_rhs = *rhs_p;
5005
5006 if (new_rhs)
c6a2c25d 5007 {
d557591d 5008 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
c6a2c25d
MJ
5009 true, GSI_SAME_STMT);
5010
5011 gimple_assign_set_rhs_from_tree (gsi, tmp);
5012 }
5013
6cbd3b6a 5014 return true;
c6a2c25d 5015 }
07ffa034 5016
6cbd3b6a
MJ
5017 return false;
5018}
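
/* Sketch of the type-repair logic above (illustrative, not from the
   original sources): once an aggregate reference has been rewritten to
   use a new scalar parameter, an assignment such as

     s.f = *p;      -->      s.f = ISRA.5;

   may pair incompatible types; the RHS is then wrapped as

     s.f = VIEW_CONVERT_EXPR<type-of-s.f> (ISRA.5);

   and forced into a separate statement via force_gimple_operand_gsi,
   while a mismatched CONSTRUCTOR on the RHS is simply replaced by an
   equivalent zero of the LHS type (PR 42714).  */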

/* Traverse the function body and perform all modifications described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
	  tree new_lhs, old_lhs = gimple_phi_result (phi);
	  new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
	  if (new_lhs)
	    {
	      gimple_phi_set_result (phi, new_lhs);
	      release_ssa_name (old_lhs);
	    }
	}

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
	      if (*t != NULL_TREE)
		modified |= ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    modified |= ipa_modify_expr (t, true, adjustments);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    modified |= ipa_modify_expr (t, false, adjustments);
		  }
	      }
	      break;

	    default:
	      break;
	    }

	  def_operand_p defp;
	  ssa_op_iter iter;
	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	    {
	      tree old_def = DEF_FROM_PTR (defp);
	      if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
								   adjustments))
		{
		  SET_DEF (defp, new_def);
		  release_ssa_name (old_def);
		  modified = true;
		}
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
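
/* Editorial note: the CFG can only change here through EH cleanup --
   rewriting a statement may render it non-throwing, in which case
   maybe_clean_eh_stmt drops it from its EH region and
   gimple_purge_dead_eh_edges removes the now-dead EH edges.  The returned
   flag is what makes ipa_early_sra request TODO_cleanup_cfg below.  */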

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple *stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
	continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    if (gimple_clobber_p (stmt))
	      {
		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		gsi_remove (&cgsi, true);
		release_defs (stmt);
		continue;
	      }
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy) =
	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
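
/* Debug-info sketch (illustrative, not from the original sources): for a
   removed parameter P, the clone's entry block ends up with statements
   roughly of the form

     # DEBUG D#1 s=> p     (source bind back to the old PARM_DECL)
     # DEBUG p => D#1      (bind of the VAR_DECL copy, unless ignored)

   so a debugger can still evaluate P in the clone even though the formal
   parameter itself is gone.  */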

/* Return true if some caller of NODE has no call statement or passes fewer
   actual arguments than there are formal parameters of the current function,
   or passes arguments whose types do not match.  Return false otherwise.  */

static bool
some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
					  void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
      return true;

  return false;
}

/* Return true if some caller of NODE has no call statement or no VUSE
   attached to its call statement.  Return false otherwise.  */

static bool
some_callers_have_no_vuse_p (struct cgraph_node *node,
			     void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
      return true;

  return false;
}
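
/* Editorial note (an assumption about the rationale, not stated in the
   original sources): splitting an aggregate argument means
   ipa_modify_call_arguments must materialize loads of the individual
   pieces right before each call, and such loads need the call's virtual
   operand to hook into; call statements without a VUSE therefore have to
   be ruled out up front.  */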

/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
			  void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call %s -> %s\n",
		 cs->caller->dump_name (), cs->callee->dump_name ());

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_fn_summary (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  node->call_for_symbol_and_aliases (convert_callers_for_node,
				     &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  tree call_fndecl;
	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      gimple_call_set_fndecl (stmt, node->decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }

  return;
}
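
/* Editorial note: the statement walk above exists because direct
   recursive calls in the freshly created clone still refer to OLD_DECL --
   they are not among NODE's callers visited by the call graph walk -- so
   they are found by scanning the clone's body and redirected to the clone
   itself before their arguments are adjusted.  */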

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;

  cgraph_edge::rebuild_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  /* This must be done after rebuilding cgraph edges for node above.
     Otherwise any recursive calls to node that are recorded in
     redirect_callers will be corrupted.  */
  vec<cgraph_edge *> redirect_callers = node->collect_callers ();
  new_node = node->create_version_clone_with_body (redirect_callers, NULL,
						   NULL, false, NULL, NULL,
						   "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  new_node->make_local ();
  return cfg_changed;
}
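
/* Illustrative note (editorial, not from the original sources): the clone
   created here carries the "isra" suffix, so a function foo typically
   reappears in dumps and symbol tables as foo.isra.0; all statically known
   callers are redirected to the clone, which is then made local.  */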

/* Means of communication between ipa_sra_check_caller and
   ipa_sra_preliminary_function_checks.  */

struct ipa_sra_check_caller_data
{
  bool has_callers;
  bool bad_arg_alignment;
  bool has_thunk;
};

/* If NODE has a caller, mark that fact in DATA, which is a pointer to
   ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
   all known calls are unit aligned and, if not, set the appropriate flag in
   DATA too.  */

static bool
ipa_sra_check_caller (struct cgraph_node *node, void *data)
{
  if (!node->callers)
    return false;

  struct ipa_sra_check_caller_data *iscc;
  iscc = (struct ipa_sra_check_caller_data *) data;
  iscc->has_callers = true;

  for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
    {
      if (cs->caller->thunk.thunk_p)
	{
	  iscc->has_thunk = true;
	  return true;
	}
      gimple *call_stmt = cs->call_stmt;
      unsigned count = gimple_call_num_args (call_stmt);
      for (unsigned i = 0; i < count; i++)
	{
	  tree arg = gimple_call_arg (call_stmt, i);
	  if (is_gimple_reg (arg))
	    continue;

	  tree offset;
	  poly_int64 bitsize, bitpos;
	  machine_mode mode;
	  int unsignedp, reversep, volatilep = 0;
	  get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
			       &unsignedp, &reversep, &volatilep);
	  if (!multiple_p (bitpos, BITS_PER_UNIT))
	    {
	      iscc->bad_arg_alignment = true;
	      return true;
	    }
	}
    }

  return false;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!node->can_be_local_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
	fprintf (dump_file, "Function cannot change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (!opt_for_fn (node->decl, optimize)
      || !opt_for_fn (node->decl, flag_ipa_sra))
    {
      if (dump_file)
	fprintf (dump_file, "Function not optimized.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
      && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Always inline function will be inlined "
		 "anyway.\n");
      return false;
    }

  struct ipa_sra_check_caller_data iscc;
  memset (&iscc, 0, sizeof (iscc));
  node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
  if (!iscc.has_callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (iscc.bad_arg_alignment)
    {
      if (dump_file)
	fprintf (dump_file,
		 "A function call has an argument with non-unit alignment.\n");
      return false;
    }

  if (iscc.has_thunk)
    {
      if (dump_file)
	fprintf (dump_file,
		 "A caller of this function has a thunk.\n");
      return false;
    }

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node::get (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_mismatched_arguments_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_no_vuse_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with no VUSE attached "
		 "to a call stmt.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_accesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
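
/* Editorial note: the two exit labels mirror the allocation order --
   "out" releases the per-function scanning data (final_bbs and
   bb_dereferences) that only exists once candidates were found, while
   "simple_out" merely tears down the pass-wide state established by
   sra_initialize.  */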

namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}
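
/* Usage note (editorial, not from the original sources): the pass runs
   under -fipa-sra, and its gate also consults the eipa_sra debug counter,
   so individual applications can be bisected with -fdbg-cnt=eipa_sra:<limit>.
   Per-function details go to the dump requested with
   -fdump-tree-eipa-sra.  */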