/* RTL dead store elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "regset.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "optabs.h"
#include "dbgcnt.h"
#include "target.h"
#include "params.h"
#include "tree-flow.h"	/* for may_be_aliased */

/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.

   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   This step uses cselib and canon_rtx to build the largest expression
   possible for each address.  This pass is a forwards pass through
   each basic block.  From the point of view of the global technique,
   the first pass could examine a block in either direction.  The
   forwards ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles (1).

   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function that read A before another store to A.  In
     this case S2 can be deleted if there are paths from S2 to the
     end of the function that have no reads or writes to A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the doc
     for that variable for when this variable is false.

   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.

   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */

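/* An illustrative sketch of the global criterion above (added commentary,
   not part of the original description).  Assuming a frame-based local X
   whose address never escapes:

     X = 1;       <- dead: every path to the exit stores to X again
     if (c)          before X is read
       X = 2;
     else
       X = 3;
     return X;

   The first store can be removed by the global technique because both
   paths overwrite X before the only read of X.  */
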
/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit, however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)

/* Obstack for the DSE dataflow bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large
   (~2GB for the small (!) test case of PR54146) and we'll hold on to
   all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack dse_bitmap_obstack;

/* Obstack for other data.  As for above: Kinda nice to be able to
   throw it all away at the end in one big sweep.  */
static struct obstack dse_obstack;

/* Scratch bitmap for cselib's cselib_expand_value_rtx.  */
static bitmap scratch = NULL;

struct insn_info;

/* This structure holds information about a candidate store.  */
struct store_info
{

  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */
  rtx mem;

  /* Canonized MEM address for use by canon_true_dependence.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and of the byte after the last byte
     associated with the operation.  */
  HOST_WIDE_INT begin, end;

  union
    {
      /* A bitmask as wide as the number of bytes in the word that
         contains a 1 if the byte may be needed.  The store is unused if
         all of the bits are 0.  This is used if IS_LARGE is false.  */
      unsigned HOST_WIDE_INT small_bitmask;

      struct
        {
          /* A bitmap with one bit per byte.  Cleared bit means the position
             is needed.  Used if IS_LARGE is true.  */
          bitmap bmap;

          /* Number of set bits (i.e. unneeded bytes) in BITMAP.  If it is
             equal to END - BEGIN, the whole store is unused.  */
          int count;
        } large;
    } positions_needed;

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mems address somewhere later in the
     basic block.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;

  /* Set if this store stores the same constant value as REDUNDANT_REASON
     insn stored.  These aren't eliminated early, because doing that
     might prevent the earlier larger store from being eliminated.  */
  struct insn_info *redundant_reason;
};

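/* Illustrative note (added for exposition): for a store narrow enough to
   fit the small encoding, e.g. an 8-byte store with BEGIN == 16 and
   END == 24, IS_LARGE is false and SMALL_BITMASK starts out as
   lowpart_bitmask (8) == 0xff (see just below), one set bit per byte that
   may still be needed.  Later stores that overwrite bytes clear bits;
   once the mask reaches zero the whole store is a deletion candidate.  */
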
/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}

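/* For example (illustration only, assuming a 64-bit HOST_WIDE_INT):
   lowpart_bitmask (1) == 0x1, lowpart_bitmask (8) == 0xff, and
   lowpart_bitmask (64) == ~(unsigned HOST_WIDE_INT) 0.
   all_positions_needed_p below shifts such a mask by a byte offset to
   test a contiguous range of bytes in a single AND.  */
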
typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;

/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and of the byte after the last byte
     associated with the operation.  If begin == end == 0, the read did
     not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};
typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;


/* One of these records is created for each insn.  */

struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a (1).  This is
     also set if the insn is a call or has a clobber mem.  If the insn
     contains a wild read, the use_rec will be null.  */
  bool wild_read;

  /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
  bool non_frame_wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
        outgoing arguments are stack pointer based and thus deemed
        of non-constant base in this pass.  This requires special
        handling but also means that the frame pointer based stores
        need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
        either stack pointer or hard frame pointer based.  This means
        that we have no other choice than also killing all the frame
        pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls.  Having
     this set is less severe than a wild read, it just means that all
     the frame related stores are killed rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems are here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The live fixed registers.  We assume only fixed registers can
     cause trouble by being clobbered from an expanded pattern;
     storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     stores.  */
  regset fixed_regs_live;

  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;

/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
static int active_local_stores_len;

struct bb_info
{

  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */


  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in sets, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is used
     to assure that shift and/or add sequences that are inserted do not
     accidentally clobber live hard regs.  */
  bitmap regs_live;
};

typedef struct bb_info *bb_info_t;
static alloc_pool bb_info_pool;

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;

/* There is a group_info for each rtx base that is used to reference
   memory.  There are also not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */
  rtx base_mem;

  /* Canonized version of base_mem's address.  */
  rtx canon_base_addr;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame,
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* These bitmaps keep track of which offsets in this group escape this
     function.  An offset escapes if it corresponds to a named variable
     whose addressable flag is set.  */
  bitmap escaped_n, escaped_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;

/* Tables of group_info structures, hashed by base value.  */
static htab_t rtx_group_table;

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

DEF_VEC_P(group_info_t);
DEF_VEC_ALLOC_P(group_info_t,heap);

static VEC(group_info_t,heap) *rtx_group_vec;


/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{

  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;

/* These are used to hold the alias sets of spill variables.  Since
   these are never aliased and there may be a lot of them, it makes
   sense to treat them specially.  This bitvector is only allocated in
   calls from dse_record_singleton_alias_set which currently is only
   made during reload1.  So when dse is called before reload this
   mechanism does nothing.  */

static bitmap clear_alias_sets = NULL;

/* The set of clear_alias_sets that have been disqualified because
   there are loads or stores using a different mode than the alias set
   was registered with.  */
static bitmap disqualified_clear_alias_sets = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  enum machine_mode mode;
};

static alloc_pool clear_alias_mode_pool;

/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counters for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* Locations that are killed by calls in the global phase.  */
static bitmap kill_on_calls;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;


static bool gate_dse1 (void);
static bool gate_dse2 (void);

/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/

/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return (struct clear_alias_mode_holder *) *slot;
}


/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

static int
invariant_group_base_eq (const void *p1, const void *p2)
{
  const_group_info_t gi1 = (const_group_info_t) p1;
  const_group_info_t gi2 = (const_group_info_t) p2;
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}


static hashval_t
invariant_group_base_hash (const void *p)
{
  const_group_info_t gi = (const_group_info_t) p;
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}


/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  void **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
         if necessary.  */
      tmp_gi.rtx_base = base;
      slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
        {
          clear_alias_group = gi =
            (group_info_t) pool_alloc (rtx_group_info_pool);
          memset (gi, 0, sizeof (struct group_info));
          gi->id = rtx_group_next_id++;
          gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
          gi->process_globally = false;
          gi->offset_map_size_n = 0;
          gi->offset_map_size_p = 0;
          gi->offset_map_n = NULL;
          gi->offset_map_p = NULL;
          VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
        }
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (BLKmode, base);
      gi->canon_base_addr = canon_rtx (base);
      gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->process_globally = false;
      gi->frame_related =
        (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
    }

  return gi;
}


/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  bitmap_obstack_initialize (&dse_bitmap_obstack);
  gcc_obstack_init (&dse_obstack);

  scratch = BITMAP_ALLOC (&reg_obstack);
  kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);

  rtx_store_info_pool
    = create_alloc_pool ("rtx_store_info_pool",
                         sizeof (struct store_info), 100);
  read_info_pool
    = create_alloc_pool ("read_info_pool",
                         sizeof (struct read_info), 100);
  insn_info_pool
    = create_alloc_pool ("insn_info_pool",
                         sizeof (struct insn_info), 100);
  bb_info_pool
    = create_alloc_pool ("bb_info_pool",
                         sizeof (struct bb_info), 100);
  rtx_group_info_pool
    = create_alloc_pool ("rtx_group_info_pool",
                         sizeof (struct group_info), 100);
  deferred_change_pool
    = create_alloc_pool ("deferred_change_pool",
                         sizeof (struct deferred_change), 10);

  rtx_group_table = htab_create (11, invariant_group_base_hash,
                                 invariant_group_base_eq, NULL);

  bb_table = XNEWVEC (bb_info_t, last_basic_block);
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !cfun->stdarg;

  init_alias_analysis ();

  if (clear_alias_sets)
    clear_alias_group = get_group_info (NULL);
  else
    clear_alias_group = NULL;
}

/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/

/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->is_large)
        BITMAP_FREE (store_info->positions_needed.large.bmap);
      if (store_info->cse_base)
        pool_free (cse_store_info_pool, store_info);
      else
        pool_free (rtx_store_info_pool, store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}

typedef struct
{
  rtx first, current;
  regset fixed_regs_live;
  bool failure;
} note_add_store_info;

/* Callback for emit_inc_dec_insn_before via note_stores.
   Check if a register is clobbered which is live afterwards.  */

static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
  rtx insn;
  note_add_store_info *info = (note_add_store_info *) data;
  int r, n;

  if (!REG_P (loc))
    return;

  /* If this register is referenced by the current or an earlier insn,
     that's OK.  E.g. this applies to the register that is being incremented
     with this addition.  */
  for (insn = info->first;
       insn != NEXT_INSN (info->current);
       insn = NEXT_INSN (insn))
    if (reg_referenced_p (loc, PATTERN (insn)))
      return;

  /* If we come here, we have a clobber of a register that's only OK
     if that register is not live.  If we don't have liveness information
     available, fail now.  */
  if (!info->fixed_regs_live)
    {
      info->failure = true;
      return;
    }
  /* Now check if this is a live fixed register.  */
  r = REGNO (loc);
  n = hard_regno_nregs[r][GET_MODE (loc)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (info->fixed_regs_live, r+n))
      info->failure = true;
}

/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
   SRC + SRCOFF before insn ARG.  */

static int
emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
                          rtx op ATTRIBUTE_UNUSED,
                          rtx dest, rtx src, rtx srcoff, void *arg)
{
  insn_info_t insn_info = (insn_info_t) arg;
  rtx insn = insn_info->insn, new_insn, cur;
  note_add_store_info info;

  /* We can reuse all operands without copying, because we are about
     to delete the insn that contained it.  */
  if (srcoff)
    {
      start_sequence ();
      emit_insn (gen_add3_insn (dest, src, srcoff));
      new_insn = get_insns ();
      end_sequence ();
    }
  else
    new_insn = gen_move_insn (dest, src);
  info.first = new_insn;
  info.fixed_regs_live = insn_info->fixed_regs_live;
  info.failure = false;
  for (cur = new_insn; cur; cur = NEXT_INSN (cur))
    {
      info.current = cur;
      note_stores (PATTERN (cur), note_add_store, &info);
    }

  /* If a failure was flagged above, return 1 so that for_each_inc_dec will
     return it immediately, communicating the failure to its caller.  */
  if (info.failure)
    return 1;

  emit_insn_before (new_insn, insn);

  return -1;
}

/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
   is there, is split into a separate insn.
   Return true on success (or if there was nothing to do), false on failure.  */

static bool
check_for_inc_dec_1 (insn_info_t insn_info)
{
  rtx insn = insn_info->insn;
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (&insn, emit_inc_dec_insn_before, insn_info) == 0;
  return true;
}


/* Entry point for postreload.  If you work on reload_cse, or you need this
   anywhere else, consider if you can provide register liveness information
   and add a parameter to this function so that it can be passed down in
   insn_info.fixed_regs_live.  */
bool
check_for_inc_dec (rtx insn)
{
  struct insn_info insn_info;
  rtx note;

  insn_info.insn = insn;
  insn_info.fixed_regs_live = NULL;
  note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (&insn, emit_inc_dec_insn_before, &insn_info) == 0;
  return true;
}

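/* Illustrative note (added commentary): if a store that is about to be
   deleted uses an auto-increment address, e.g. an SImode store through
   (post_inc (reg R)), the routines above keep the side effect alive by
   emitting a separate R = R + 4 (via gen_add3_insn) in front of the dying
   insn, and the deletion is abandoned if that replacement would clobber a
   live fixed register.  */
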
/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  if (!check_for_inc_dec_1 (insn_info))
    return;
  if (dump_file)
    {
      fprintf (dump_file, "Locally deleting insn %d ",
               INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
        fprintf (dump_file, "alias set %d\n",
                 (int) insn_info->store_rec->alias_set);
      else
        fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      pool_free (read_info_pool, read_info);
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}

/* Check if EXPR can possibly escape the current function scope.  */
static bool
can_escape (tree expr)
{
  tree base;
  if (!expr)
    return true;
  base = get_base_address (expr);
  if (DECL_P (base)
      && !may_be_aliased (base))
    return false;
  return true;
}

/* Set the store* bitmaps and offset_map_size* fields in GROUP based on
   OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
                tree expr)
{
  HOST_WIDE_INT i;
  bool expr_escapes = can_escape (expr);
  if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
    for (i=offset; i<offset+width; i++)
      {
        bitmap store1;
        bitmap store2;
        bitmap escaped;
        int ai;
        if (i < 0)
          {
            store1 = group->store1_n;
            store2 = group->store2_n;
            escaped = group->escaped_n;
            ai = -i;
          }
        else
          {
            store1 = group->store1_p;
            store2 = group->store2_p;
            escaped = group->escaped_p;
            ai = i;
          }

        if (!bitmap_set_bit (store1, ai))
          bitmap_set_bit (store2, ai);
        else
          {
            if (i < 0)
              {
                if (group->offset_map_size_n < ai)
                  group->offset_map_size_n = ai;
              }
            else
              {
                if (group->offset_map_size_p < ai)
                  group->offset_map_size_p = ai;
              }
          }
        if (expr_escapes)
          bitmap_set_bit (escaped, ai);
      }
}

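/* Worked example (added for exposition): the first store a group sees at
   byte offset 4 sets bit 4 of store1_p; a second store to the same offset
   also sets bit 4 of store2_p.  Only offsets that make it into store2_*
   (plus, for frame related groups, the store1_* offsets ORed in later)
   receive positions in the global bitmaps, since a lone store to a
   non-frame offset can never be proved dead by the global problem.  */
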
static void
reset_active_stores (void)
{
  active_local_stores = NULL;
  active_local_stores_len = 0;
}

/* Free all READ_REC of the LAST_INSN of BB_INFO.  */

static void
free_read_records (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;
  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
        {
          pool_free (read_info_pool, *ptr);
          *ptr = next;
        }
      else
        ptr = &(*ptr)->next;
    }
}

/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Set the BB_INFO so that the last insn is marked as a wild read of
   non-frame locations.  */

static void
add_non_frame_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->non_frame_wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || x == pic_offset_table_rtx)
        return true;
      return false;

    default:
      return false;
    }
}

/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.

   FOR_READ is true if this is a mem read and false if not.  */

static bool
canon_address (rtx mem,
               alias_set_type *alias_set_out,
               int *group_id,
               HOST_WIDE_INT *offset,
               cselib_val **base)
{
  enum machine_mode address_mode = get_address_mode (mem);
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  int expanded;

  /* Make sure that cselib has initialized all of the operands of
     the address before asking it to do the subst.  */

  if (clear_alias_sets)
    {
      /* If this is a spill, do not do any further processing.  */
      alias_set_type alias_set = MEM_ALIAS_SET (mem);
      if (dump_file)
        fprintf (dump_file, "found alias set %d\n", (int) alias_set);
      if (bitmap_bit_p (clear_alias_sets, alias_set))
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (alias_set);

          /* If the modes do not match, we cannot process this set.  */
          if (entry->mode != GET_MODE (mem))
            {
              if (dump_file)
                fprintf (dump_file,
                         "disqualifying alias set %d, (%s) != (%s)\n",
                         (int) alias_set, GET_MODE_NAME (entry->mode),
                         GET_MODE_NAME (GET_MODE (mem)));

              bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
              return false;
            }

          *alias_set_out = alias_set;
          *group_id = clear_alias_group->id;
          return true;
        }
    }

  *alias_set_out = 0;

  cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));

  if (dump_file)
    {
      fprintf (dump_file, " mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* First see if just canon_rtx (mem_address) is const or frame,
     if not, try cselib_expand_value_rtx and call canon_rtx on that.  */
  address = NULL_RTX;
  for (expanded = 0; expanded < 2; expanded++)
    {
      if (expanded)
        {
          /* Use cselib to replace all of the reg references with the full
             expression.  This will take care of the case where we have

             r_x = base + offset;
             val = *r_x;

             by making it into

             val = *(base + offset);  */

          expanded_address = cselib_expand_value_rtx (mem_address,
                                                      scratch, 5);

          /* If this fails, just go with the address from first
             iteration.  */
          if (!expanded_address)
            break;
        }
      else
        expanded_address = mem_address;

      /* Split the address into canonical BASE + OFFSET terms.  */
      address = canon_rtx (expanded_address);

      *offset = 0;

      if (dump_file)
        {
          if (expanded)
            {
              fprintf (dump_file, "\n after cselib_expand address: ");
              print_inline_rtx (dump_file, expanded_address, 0);
              fprintf (dump_file, "\n");
            }

          fprintf (dump_file, "\n after canon_rtx address: ");
          print_inline_rtx (dump_file, address, 0);
          fprintf (dump_file, "\n");
        }

      if (GET_CODE (address) == CONST)
        address = XEXP (address, 0);

      if (GET_CODE (address) == PLUS
          && CONST_INT_P (XEXP (address, 1)))
        {
          *offset = INTVAL (XEXP (address, 1));
          address = XEXP (address, 0);
        }

      if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
          && const_or_frame_p (address))
        {
          group_info_t group = get_group_info (address);

          if (dump_file)
            fprintf (dump_file, " gid=%d offset=%d \n",
                     group->id, (int)*offset);
          *base = NULL;
          *group_id = group->id;
          return true;
        }
    }

  *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
  *group_id = -1;

  if (*base == NULL)
    {
      if (dump_file)
        fprintf (dump_file, " no cselib val - should be a wild read.\n");
      return false;
    }
  if (dump_file)
    fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
             (*base)->uid, (*base)->hash, (int)*offset);
  return true;
}

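/* Illustrative sketch (added commentary): for a frame-relative access such
   as (mem:SI (plus (reg fp) (const_int -8))), canon_address produces a
   non-negative *GROUP_ID for the frame pointer's group and *OFFSET == -8,
   so the store can take part in the global problem.  For a pointer loaded
   from memory the base is known only to cselib, so *GROUP_ID is -1 and
   *BASE carries the cselib value instead.  */
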
/* Clear the rhs field from the active_local_stores array.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
        store_info = store_info->next;

      store_info->rhs = NULL;
      store_info->const_rhs = NULL;

      ptr = ptr->next_local_store;
    }
}


/* Mark the byte that is POS bytes from the beginning of store S_INFO as
   unneeded.  */

static inline void
set_position_unneeded (store_info_t s_info, int pos)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
        s_info->positions_needed.large.count++;
    }
  else
    s_info->positions_needed.small_bitmask
      &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
}

/* Mark the whole store S_INFO as unneeded.  */

static inline void
set_all_positions_unneeded (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int pos, end = s_info->end - s_info->begin;
      for (pos = 0; pos < end; pos++)
        bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
      s_info->positions_needed.large.count = end;
    }
  else
    s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
}

/* Return TRUE if any bytes from S_INFO store are needed.  */

static inline bool
any_positions_needed_p (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    return (s_info->positions_needed.large.count
            < s_info->end - s_info->begin);
  else
    return (s_info->positions_needed.small_bitmask
            != (unsigned HOST_WIDE_INT) 0);
}

/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
   store are needed.  */

static inline bool
all_positions_needed_p (store_info_t s_info, int start, int width)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int end = start + width;
      while (start < end)
        if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
          return false;
      return true;
    }
  else
    {
      unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
      return (s_info->positions_needed.small_bitmask & mask) == mask;
    }
}

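/* Worked example (added for exposition): suppose an active 8-byte store
   covers [0, 8) and a later 4-byte store covers [4, 8).  record_store
   calls set_position_unneeded for bytes 4..7 of the earlier store; if a
   further store clears bytes 0..3 as well, any_positions_needed_p becomes
   false and the earlier insn is handed to delete_dead_store_insn.  */
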
static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT,
                           HOST_WIDE_INT, basic_block, bool);


/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  */

static int
record_store (rtx body, bb_info_t bb_info)
{
  rtx mem, rhs, const_rhs, mem_addr;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set;
  insn_info_t insn_info = bb_info->last_insn;
  store_info_t store_info = NULL;
  int group_id;
  cselib_val *base = NULL;
  insn_info_t ptr, last, redundant_reason;
  bool store_is_unused;

  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
    return 0;

  mem = SET_DEST (body);

  /* If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  */
  store_is_unused
    = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);

  /* Check whether that value is a suitable memory location.  */
  if (!MEM_P (mem))
    {
      /* If the set or clobber is unused, then it does not affect our
         ability to get rid of the entire insn.  */
      if (!store_is_unused)
        insn_info->cannot_delete = true;
      return 0;
    }

  /* At this point we know mem is a mem.  */
  if (GET_MODE (mem) == BLKmode)
    {
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
        {
          if (dump_file)
            fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
          add_wild_read (bb_info);
          insn_info->cannot_delete = true;
          return 0;
        }
      /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
         as memset (addr, 0, 36);  */
      else if (!MEM_SIZE_KNOWN_P (mem)
               || MEM_SIZE (mem) <= 0
               || MEM_SIZE (mem) > MAX_OFFSET
               || GET_CODE (body) != SET
               || !CONST_INT_P (SET_SRC (body)))
        {
          if (!store_is_unused)
            {
              /* If the set or clobber is unused, then it does not affect our
                 ability to get rid of the entire insn.  */
              insn_info->cannot_delete = true;
              clear_rhs_from_active_local_stores ();
            }
          return 0;
        }
    }

  /* We can still process a volatile mem, we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = MEM_SIZE (mem);
  else
    {
      width = GET_MODE_SIZE (GET_MODE (mem));
      gcc_assert ((unsigned) width <= HOST_BITS_PER_WIDE_INT);
    }

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      gcc_assert (GET_MODE (mem) != BLKmode);

      if (!bitmap_set_bit (store1, spill_alias_set))
        bitmap_set_bit (store2, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
        clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);

      if (dump_file)
        fprintf (dump_file, " processing spill store %d(%s)\n",
                 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
         frame pointer we can do global analysis.  */

      group_info_t group
        = VEC_index (group_info_t, rtx_group_vec, group_id);
      tree expr = MEM_EXPR (mem);

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
      set_usage_bits (group, offset, width, expr);

      if (dump_file)
        fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
                 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      if (may_be_sp_based_p (XEXP (mem, 0)))
        insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = (store_info_t) pool_alloc (cse_store_info_pool);
      group_id = -1;

      if (dump_file)
        fprintf (dump_file, " processing cselib store [%d..%d)\n",
                 (int)offset, (int)(offset+width));
    }

  const_rhs = rhs = NULL_RTX;
  if (GET_CODE (body) == SET
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
          || GET_CODE (SET_SRC (body)) == SUBREG
          || CONSTANT_P (SET_SRC (body)))
      && !MEM_VOLATILE_P (mem)
      /* Sometimes the store and reload is used for truncation and
         rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    {
      rhs = SET_SRC (body);
      if (CONSTANT_P (rhs))
        const_rhs = rhs;
      else if (body == PATTERN (insn_info->insn))
        {
          rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
          if (tem && CONSTANT_P (XEXP (tem, 0)))
            const_rhs = XEXP (tem, 0);
        }
      if (const_rhs == NULL_RTX && REG_P (rhs))
        {
          rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);

          if (tem && CONSTANT_P (tem))
            const_rhs = tem;
        }
    }

  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;
  redundant_reason = NULL;
  mem = canon_rtx (mem);
  /* For alias_set != 0 canon_true_dependence should never be called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
        mem_addr = base->val_rtx;
      else
        {
          group_info_t group
            = VEC_index (group_info_t, rtx_group_vec, group_id);
          mem_addr = group->canon_base_addr;
        }
      if (offset)
        mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
    }

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool del = true;

      /* Skip the clobbers.  We delete the active insn if this insn
         shadows the set.  To have been put on the active list, it
         has exactly one set.  */
      while (!s_info->is_set)
        s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
        del = false;
      else if (s_info->alias_set)
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (s_info->alias_set);
          /* Generally, spills cannot be processed if any of the
             references to the slot have a different mode.  But if
             we are in the same block and mode is exactly the same
             between this store and one before in the same block,
             we can still delete it.  */
          if ((GET_MODE (mem) == GET_MODE (s_info->mem))
              && (GET_MODE (mem) == entry->mode))
            {
              del = true;
              set_all_positions_unneeded (s_info);
            }
          if (dump_file)
            fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
                     INSN_UID (ptr->insn), (int) s_info->alias_set);
        }
      else if ((s_info->group_id == group_id)
               && (s_info->cse_base == base))
        {
          HOST_WIDE_INT i;
          if (dump_file)
            fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
                     INSN_UID (ptr->insn), s_info->group_id,
                     (int)s_info->begin, (int)s_info->end);

          /* Even if PTR won't be eliminated as unneeded, if both
             PTR and this insn store the same constant value, we might
             eliminate this insn instead.  */
          if (s_info->const_rhs
              && const_rhs
              && offset >= s_info->begin
              && offset + width <= s_info->end
              && all_positions_needed_p (s_info, offset - s_info->begin,
                                         width))
            {
              if (GET_MODE (mem) == BLKmode)
                {
                  if (GET_MODE (s_info->mem) == BLKmode
                      && s_info->const_rhs == const_rhs)
                    redundant_reason = ptr;
                }
              else if (s_info->const_rhs == const0_rtx
                       && const_rhs == const0_rtx)
                redundant_reason = ptr;
              else
                {
                  rtx val;
                  start_sequence ();
                  val = get_stored_val (s_info, GET_MODE (mem),
                                        offset, offset + width,
                                        BLOCK_FOR_INSN (insn_info->insn),
                                        true);
                  if (get_insns () != NULL)
                    val = NULL_RTX;
                  end_sequence ();
                  if (val && rtx_equal_p (val, const_rhs))
                    redundant_reason = ptr;
                }
            }

          for (i = MAX (offset, s_info->begin);
               i < offset + width && i < s_info->end;
               i++)
            set_position_unneeded (s_info, i - s_info->begin);
        }
      else if (s_info->rhs)
        /* Need to see if it is possible for this store to overwrite
           the value of store_info.  If it is, set the rhs to NULL to
           keep it from being used to remove a load.  */
        {
          if (canon_true_dependence (s_info->mem,
                                     GET_MODE (s_info->mem),
                                     s_info->mem_addr,
                                     mem, mem_addr))
            {
              s_info->rhs = NULL;
              s_info->const_rhs = NULL;
            }
        }

      /* An insn can be deleted if every position of every one of
         its s_infos is zero.  */
      if (any_positions_needed_p (s_info))
        del = false;

      if (del)
        {
          insn_info_t insn_to_delete = ptr;

          active_local_stores_len--;
          if (last)
            last->next_local_store = ptr->next_local_store;
          else
            active_local_stores = ptr->next_local_store;

          if (!insn_to_delete->cannot_delete)
            delete_dead_store_insn (insn_to_delete);
        }
      else
        last = ptr;

      ptr = next;
    }

  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = mem;
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = mem_addr;
  store_info->cse_base = base;
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      store_info->is_large = true;
      store_info->positions_needed.large.count = 0;
      store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else
    {
      store_info->is_large = false;
      store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1719 }
6fb5fa3c
DB
1720 store_info->group_id = group_id;
1721 store_info->begin = offset;
1722 store_info->end = offset + width;
1723 store_info->is_set = GET_CODE (body) == SET;
8dd5516b
JJ
1724 store_info->rhs = rhs;
1725 store_info->const_rhs = const_rhs;
1726 store_info->redundant_reason = redundant_reason;
6fb5fa3c 1727
6fb5fa3c
DB
1728 /* If this is a clobber, we return 0. We will only be able to
 1729     delete this insn if there is only one USED store, but we
1730 can use the clobber to delete other stores earlier. */
1731 return store_info->is_set ? 1 : 0;
1732}
1733
1734
1735static void
1736dump_insn_info (const char * start, insn_info_t insn_info)
1737{
b8698a0f 1738 fprintf (dump_file, "%s insn=%d %s\n", start,
6fb5fa3c
DB
1739 INSN_UID (insn_info->insn),
1740 insn_info->store_rec ? "has store" : "naked");
1741}
1742
1743
8660aaae
EC
1744/* If the modes are different and the value's source and target do not
 1745   line up, we need to extract the value from the lower part of the rhs of
 1746   the store, shift it, and then put it into a form that can be shoved
 1747   into the read_insn.  This function generates a right SHIFT of a
 1748   value that is at least ACCESS_SIZE bytes wide, in READ_MODE.  The
 1749   shifted value is returned, or NULL if we failed to find a
 1750   shift.  */
1751
1752static rtx
18b526e8 1753find_shift_sequence (int access_size,
8660aaae 1754 store_info_t store_info,
8dd5516b
JJ
1755 enum machine_mode read_mode,
1756 int shift, bool speed, bool require_cst)
8660aaae
EC
1757{
1758 enum machine_mode store_mode = GET_MODE (store_info->mem);
18b526e8
RS
1759 enum machine_mode new_mode;
1760 rtx read_reg = NULL;
8660aaae
EC
1761
1762 /* Some machines like the x86 have shift insns for each size of
1763 operand. Other machines like the ppc or the ia-64 may only have
1764 shift insns that shift values within 32 or 64 bit registers.
1765 This loop tries to find the smallest shift insn that will right
1766 justify the value we want to read but is available in one insn on
1767 the machine. */
1768
18b526e8
RS
1769 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1770 MODE_INT);
1771 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1772 new_mode = GET_MODE_WIDER_MODE (new_mode))
8660aaae 1773 {
18b526e8 1774 rtx target, new_reg, shift_seq, insn, new_lhs;
d898d29b 1775 int cost;
348eea5f 1776
72a2609f
JJ
1777 /* If a constant was stored into memory, try to simplify it here,
1778 otherwise the cost of the shift might preclude this optimization
1779 e.g. at -Os, even when no actual shift will be needed. */
8dd5516b 1780 if (store_info->const_rhs)
72a2609f
JJ
1781 {
1782 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
8dd5516b
JJ
1783 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1784 store_mode, byte);
72a2609f
JJ
1785 if (ret && CONSTANT_P (ret))
1786 {
1787 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1788 ret, GEN_INT (shift));
1789 if (ret && CONSTANT_P (ret))
1790 {
1791 byte = subreg_lowpart_offset (read_mode, new_mode);
1792 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1793 if (ret && CONSTANT_P (ret)
5e8f01f4 1794 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
72a2609f
JJ
1795 return ret;
1796 }
1797 }
1798 }
1799
8dd5516b
JJ
1800 if (require_cst)
1801 return NULL_RTX;
1802
18b526e8
RS
1803 /* Try a wider mode if truncating the store mode to NEW_MODE
1804 requires a real instruction. */
1805 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
d0edd768 1806 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
348eea5f
RS
1807 continue;
1808
18b526e8
RS
1809 /* Also try a wider mode if the necessary punning is either not
1810 desirable or not possible. */
1811 if (!CONSTANT_P (store_info->rhs)
1812 && !MODES_TIEABLE_P (new_mode, store_mode))
1813 continue;
18b526e8 1814
348eea5f 1815 new_reg = gen_reg_rtx (new_mode);
8660aaae
EC
1816
1817 start_sequence ();
1818
1819 /* In theory we could also check for an ashr. Ian Taylor knows
1820 of one dsp where the cost of these two was not the same. But
1821 this really is a rare case anyway. */
1822 target = expand_binop (new_mode, lshr_optab, new_reg,
1823 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1824
c6f3019a
RS
1825 shift_seq = get_insns ();
1826 end_sequence ();
8660aaae 1827
c6f3019a
RS
1828 if (target != new_reg || shift_seq == NULL)
1829 continue;
1830
1831 cost = 0;
1832 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1833 if (INSN_P (insn))
f40751dd 1834 cost += insn_rtx_cost (PATTERN (insn), speed);
c6f3019a
RS
1835
1836 /* The computation up to here is essentially independent
1837 of the arguments and could be precomputed. It may
1838 not be worth doing so. We could precompute if
1839 worthwhile or at least cache the results. The result
06acf7d0
RS
1840 technically depends on both SHIFT and ACCESS_SIZE,
1841 but in practice the answer will depend only on ACCESS_SIZE. */
c6f3019a
RS
1842
1843 if (cost > COSTS_N_INSNS (1))
1844 continue;
1845
d898d29b
JJ
1846 new_lhs = extract_low_bits (new_mode, store_mode,
1847 copy_rtx (store_info->rhs));
1848 if (new_lhs == NULL_RTX)
1849 continue;
1850
c6f3019a
RS
1851 /* We found an acceptable shift. Generate a move to
1852 take the value from the store and put it into the
1853 shift pseudo, then shift it, then generate another
1854 move to put in into the target of the read. */
18b526e8 1855 emit_move_insn (new_reg, new_lhs);
c6f3019a 1856 emit_insn (shift_seq);
18b526e8 1857 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
c6f3019a 1858 break;
8660aaae
EC
1859 }
1860
18b526e8 1861 return read_reg;
8660aaae
EC
1862}
1863
1864
02b47899
KZ
1865/* Call back for note_stores to find the hard regs set or clobbered by
1866 insn. Data is a bitmap of the hardregs set so far. */
1867
1868static void
1869look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1870{
1871 bitmap regs_set = (bitmap) data;
1872
1873 if (REG_P (x)
f773c2bd 1874 && HARD_REGISTER_P (x))
02b47899 1875 {
f773c2bd
AS
1876 unsigned int regno = REGNO (x);
1877 bitmap_set_range (regs_set, regno,
1878 hard_regno_nregs[regno][GET_MODE (x)]);
02b47899
KZ
1879 }
1880}
1881
8dd5516b
JJ
1882/* Helper function for replace_read and record_store.
1883 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1884 to one before READ_END bytes read in READ_MODE. Return NULL
 1885   if not successful.  If REQUIRE_CST is true, only return a constant.  */
1886
1887static rtx
1888get_stored_val (store_info_t store_info, enum machine_mode read_mode,
1889 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1890 basic_block bb, bool require_cst)
1891{
1892 enum machine_mode store_mode = GET_MODE (store_info->mem);
1893 int shift;
1894 int access_size; /* In bytes. */
1895 rtx read_reg;
1896
1897 /* To get here the read is within the boundaries of the write so
1898 shift will never be negative. Start out with the shift being in
1899 bytes. */
1900 if (store_mode == BLKmode)
1901 shift = 0;
1902 else if (BYTES_BIG_ENDIAN)
1903 shift = store_info->end - read_end;
1904 else
1905 shift = read_begin - store_info->begin;
1906
1907 access_size = shift + GET_MODE_SIZE (read_mode);
1908
1909 /* From now on it is bits. */
1910 shift *= BITS_PER_UNIT;
1911
1912 if (shift)
1913 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1914 optimize_bb_for_speed_p (bb),
1915 require_cst);
1916 else if (store_mode == BLKmode)
1917 {
1918 /* The store is a memset (addr, const_val, const_size). */
1919 gcc_assert (CONST_INT_P (store_info->rhs));
1920 store_mode = int_mode_for_mode (read_mode);
1921 if (store_mode == BLKmode)
1922 read_reg = NULL_RTX;
1923 else if (store_info->rhs == const0_rtx)
1924 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1925 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1926 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1927 read_reg = NULL_RTX;
1928 else
1929 {
1930 unsigned HOST_WIDE_INT c
1931 = INTVAL (store_info->rhs)
1932 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1933 int shift = BITS_PER_UNIT;
1934 while (shift < HOST_BITS_PER_WIDE_INT)
1935 {
1936 c |= (c << shift);
1937 shift <<= 1;
1938 }
6d26322f 1939 read_reg = gen_int_mode (c, store_mode);
8dd5516b
JJ
1940 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1941 }
1942 }
1943 else if (store_info->const_rhs
1944 && (require_cst
1945 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1946 read_reg = extract_low_bits (read_mode, store_mode,
1947 copy_rtx (store_info->const_rhs));
1948 else
1949 read_reg = extract_low_bits (read_mode, store_mode,
1950 copy_rtx (store_info->rhs));
1951 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1952 read_reg = NULL_RTX;
1953 return read_reg;
1954}
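
   As a rough illustration of the arithmetic above, here is a standalone
   sketch (not part of dse.c; the offsets, the 0x5a fill byte and the
   little-endian, 64-bit HOST_WIDE_INT assumptions are invented for the
   example) that reproduces the shift computation and the byte-replication
   loop of get_stored_val:

/* Illustrative sketch only, not part of dse.c.  */
#include <stdio.h>

int
main (void)
{
  /* Case 1: an 8-byte store covering [0..8) followed by a 4-byte read of
     [4..8).  On a little-endian target shift = read_begin - store_begin
     and access_size = shift + read_size, both in bytes at first.  */
  int store_begin = 0, read_begin = 4, read_size = 4;
  int shift = read_begin - store_begin;          /* 4 bytes  */
  int access_size = shift + read_size;           /* 8 bytes  */
  shift *= 8;                                    /* now 32 bits */
  printf ("shift = %d bits, access_size = %d bytes\n", shift, access_size);

  /* Case 2: a memset (addr, 0x5a, len) recorded as a BLKmode store.  The
     single byte is replicated across the word by repeated doubling, as in
     the BLKmode branch of get_stored_val.  */
  unsigned long long c = 0x5aULL & 0xff;
  int s = 8;                                     /* BITS_PER_UNIT */
  while (s < 64)                                 /* HOST_BITS_PER_WIDE_INT */
    {
      c |= c << s;
      s <<= 1;
    }
  printf ("replicated constant = 0x%llx\n", c);  /* 0x5a5a5a5a5a5a5a5a */
  return 0;
}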
02b47899 1955
6fb5fa3c
DB
1956/* Take a sequence of:
1957 A <- r1
1958 ...
1959 ... <- A
1960
b8698a0f 1961 and change it into
6fb5fa3c
DB
1962 r2 <- r1
1963 A <- r1
1964 ...
1965 ... <- r2
1966
8660aaae
EC
1967 or
1968
1969 r3 <- extract (r1)
1970 r3 <- r3 >> shift
1971 r2 <- extract (r3)
1972 ... <- r2
1973
1974 or
1975
1976 r2 <- extract (r1)
1977 ... <- r2
1978
1979 Depending on the alignment and the mode of the store and
1980 subsequent load.
1981
1982
1983 The STORE_INFO and STORE_INSN are for the store and READ_INFO
6fb5fa3c
DB
1984 and READ_INSN are for the read. Return true if the replacement
1985 went ok. */
1986
1987static bool
b8698a0f 1988replace_read (store_info_t store_info, insn_info_t store_insn,
8dd5516b
JJ
1989 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1990 bitmap regs_live)
6fb5fa3c 1991{
8660aaae
EC
1992 enum machine_mode store_mode = GET_MODE (store_info->mem);
1993 enum machine_mode read_mode = GET_MODE (read_info->mem);
02b47899 1994 rtx insns, this_insn, read_reg;
8dd5516b 1995 basic_block bb;
8660aaae 1996
6fb5fa3c
DB
1997 if (!dbg_cnt (dse))
1998 return false;
1999
18b526e8
RS
2000 /* Create a sequence of instructions to set up the read register.
2001 This sequence goes immediately before the store and its result
2002 is read by the load.
2003
2004 We need to keep this in perspective. We are replacing a read
8660aaae
EC
2005 with a sequence of insns, but the read will almost certainly be
2006 in cache, so it is not going to be an expensive one. Thus, we
2007 are not willing to do a multi insn shift or worse a subroutine
2008 call to get rid of the read. */
18b526e8
RS
2009 if (dump_file)
2010 fprintf (dump_file, "trying to replace %smode load in insn %d"
2011 " from %smode store in insn %d\n",
2012 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2013 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2014 start_sequence ();
8dd5516b
JJ
2015 bb = BLOCK_FOR_INSN (read_insn->insn);
2016 read_reg = get_stored_val (store_info,
2017 read_mode, read_info->begin, read_info->end,
2018 bb, false);
18b526e8 2019 if (read_reg == NULL_RTX)
8660aaae 2020 {
18b526e8
RS
2021 end_sequence ();
2022 if (dump_file)
2023 fprintf (dump_file, " -- could not extract bits of stored value\n");
2024 return false;
8660aaae 2025 }
18b526e8
RS
2026 /* Force the value into a new register so that it won't be clobbered
2027 between the store and the load. */
2028 read_reg = copy_to_mode_reg (read_mode, read_reg);
2029 insns = get_insns ();
2030 end_sequence ();
8660aaae 2031
02b47899
KZ
2032 if (insns != NULL_RTX)
2033 {
2034 /* Now we have to scan the set of new instructions to see if the
 2035	 sequence contains any sets of hardregs that happened to be
2036 live at this point. For instance, this can happen if one of
2037 the insns sets the CC and the CC happened to be live at that
2038 point. This does occasionally happen, see PR 37922. */
3f9b14ff 2039 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
02b47899
KZ
2040
2041 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2042 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
b8698a0f 2043
02b47899
KZ
2044 bitmap_and_into (regs_set, regs_live);
2045 if (!bitmap_empty_p (regs_set))
2046 {
2047 if (dump_file)
2048 {
b8698a0f 2049 fprintf (dump_file,
02b47899
KZ
2050 "abandoning replacement because sequence clobbers live hardregs:");
2051 df_print_regset (dump_file, regs_set);
2052 }
b8698a0f 2053
02b47899
KZ
2054 BITMAP_FREE (regs_set);
2055 return false;
2056 }
2057 BITMAP_FREE (regs_set);
2058 }
2059
8660aaae 2060 if (validate_change (read_insn->insn, loc, read_reg, 0))
6fb5fa3c 2061 {
f883e0a7
KG
2062 deferred_change_t deferred_change =
2063 (deferred_change_t) pool_alloc (deferred_change_pool);
b8698a0f 2064
8660aaae
EC
2065 /* Insert this right before the store insn where it will be safe
2066 from later insns that might change it before the read. */
2067 emit_insn_before (insns, store_insn->insn);
b8698a0f 2068
8660aaae
EC
2069 /* And now for the kludge part: cselib croaks if you just
2070 return at this point. There are two reasons for this:
b8698a0f 2071
8660aaae
EC
2072 1) Cselib has an idea of how many pseudos there are and
2073 that does not include the new ones we just added.
b8698a0f 2074
8660aaae
EC
2075 2) Cselib does not know about the move insn we added
2076 above the store_info, and there is no way to tell it
2077 about it, because it has "moved on".
b8698a0f 2078
8660aaae
EC
2079 Problem (1) is fixable with a certain amount of engineering.
 2080     Problem (2) requires starting the bb from scratch.  This
2081 could be expensive.
b8698a0f 2082
8660aaae
EC
2083 So we are just going to have to lie. The move/extraction
2084 insns are not really an issue, cselib did not see them. But
2085 the use of the new pseudo read_insn is a real problem because
2086 cselib has not scanned this insn. The way that we solve this
2087 problem is that we are just going to put the mem back for now
2088 and when we are finished with the block, we undo this. We
2089 keep a table of mems to get rid of. At the end of the basic
2090 block we can put them back. */
b8698a0f 2091
8660aaae
EC
2092 *loc = read_info->mem;
2093 deferred_change->next = deferred_change_list;
2094 deferred_change_list = deferred_change;
2095 deferred_change->loc = loc;
2096 deferred_change->reg = read_reg;
b8698a0f 2097
8660aaae
EC
2098 /* Get rid of the read_info, from the point of view of the
2099 rest of dse, play like this read never happened. */
2100 read_insn->read_rec = read_info->next;
2101 pool_free (read_info_pool, read_info);
18b526e8
RS
2102 if (dump_file)
2103 {
2104 fprintf (dump_file, " -- replaced the loaded MEM with ");
2105 print_simple_rtl (dump_file, read_reg);
2106 fprintf (dump_file, "\n");
2107 }
8660aaae 2108 return true;
6fb5fa3c 2109 }
b8698a0f 2110 else
6fb5fa3c 2111 {
6fb5fa3c 2112 if (dump_file)
18b526e8
RS
2113 {
2114 fprintf (dump_file, " -- replacing the loaded MEM with ");
2115 print_simple_rtl (dump_file, read_reg);
2116 fprintf (dump_file, " led to an invalid instruction\n");
2117 }
6fb5fa3c
DB
2118 return false;
2119 }
2120}
2121
6fb5fa3c
DB
2122/* A for_each_rtx callback in which DATA is the bb_info. Check to see
2123 if LOC is a mem and if it is look at the address and kill any
2124 appropriate stores that may be active. */
2125
2126static int
2127check_mem_read_rtx (rtx *loc, void *data)
2128{
6216f94e 2129 rtx mem = *loc, mem_addr;
6fb5fa3c
DB
2130 bb_info_t bb_info;
2131 insn_info_t insn_info;
2132 HOST_WIDE_INT offset = 0;
2133 HOST_WIDE_INT width = 0;
4862826d 2134 alias_set_type spill_alias_set = 0;
b8698a0f 2135 cselib_val *base = NULL;
6fb5fa3c
DB
2136 int group_id;
2137 read_info_t read_info;
2138
2139 if (!mem || !MEM_P (mem))
2140 return 0;
2141
2142 bb_info = (bb_info_t) data;
2143 insn_info = bb_info->last_insn;
2144
2145 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2146 || (MEM_VOLATILE_P (mem)))
2147 {
2148 if (dump_file)
2149 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2150 add_wild_read (bb_info);
2151 insn_info->cannot_delete = true;
2152 return 0;
2153 }
2154
2155 /* If it is reading readonly mem, then there can be no conflict with
2156 another write. */
2157 if (MEM_READONLY_P (mem))
2158 return 0;
2159
2160 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2161 {
2162 if (dump_file)
2163 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2164 add_wild_read (bb_info);
2165 return 0;
2166 }
2167
2168 if (GET_MODE (mem) == BLKmode)
2169 width = -1;
2170 else
2171 width = GET_MODE_SIZE (GET_MODE (mem));
2172
f883e0a7 2173 read_info = (read_info_t) pool_alloc (read_info_pool);
6fb5fa3c
DB
2174 read_info->group_id = group_id;
2175 read_info->mem = mem;
2176 read_info->alias_set = spill_alias_set;
2177 read_info->begin = offset;
2178 read_info->end = offset + width;
2179 read_info->next = insn_info->read_rec;
2180 insn_info->read_rec = read_info;
6216f94e
JJ
2181 /* For alias_set != 0 canon_true_dependence should be never called. */
2182 if (spill_alias_set)
2183 mem_addr = NULL_RTX;
2184 else
2185 {
2186 if (group_id < 0)
2187 mem_addr = base->val_rtx;
2188 else
2189 {
2190 group_info_t group
2191 = VEC_index (group_info_t, rtx_group_vec, group_id);
2192 mem_addr = group->canon_base_addr;
2193 }
2194 if (offset)
372d6395 2195 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
6216f94e 2196 }
6fb5fa3c 2197
0d52bcc1 2198  /* We ignore the clobbers in store_info.  This is mildly aggressive,
6fb5fa3c
DB
2199 but there really should not be a clobber followed by a read. */
2200
2201 if (spill_alias_set)
2202 {
2203 insn_info_t i_ptr = active_local_stores;
2204 insn_info_t last = NULL;
2205
2206 if (dump_file)
2207 fprintf (dump_file, " processing spill load %d\n",
4862826d 2208 (int) spill_alias_set);
6fb5fa3c
DB
2209
2210 while (i_ptr)
2211 {
2212 store_info_t store_info = i_ptr->store_rec;
2213
2214 /* Skip the clobbers. */
2215 while (!store_info->is_set)
2216 store_info = store_info->next;
b8698a0f 2217
6fb5fa3c
DB
2218 if (store_info->alias_set == spill_alias_set)
2219 {
2220 if (dump_file)
2221 dump_insn_info ("removing from active", i_ptr);
2222
dabd47e7 2223 active_local_stores_len--;
6fb5fa3c
DB
2224 if (last)
2225 last->next_local_store = i_ptr->next_local_store;
2226 else
2227 active_local_stores = i_ptr->next_local_store;
2228 }
2229 else
2230 last = i_ptr;
2231 i_ptr = i_ptr->next_local_store;
2232 }
2233 }
2234 else if (group_id >= 0)
2235 {
2236 /* This is the restricted case where the base is a constant or
2237 the frame pointer and offset is a constant. */
2238 insn_info_t i_ptr = active_local_stores;
2239 insn_info_t last = NULL;
b8698a0f 2240
6fb5fa3c
DB
2241 if (dump_file)
2242 {
2243 if (width == -1)
2244 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2245 group_id);
2246 else
2247 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2248 group_id, (int)offset, (int)(offset+width));
2249 }
2250
2251 while (i_ptr)
2252 {
2253 bool remove = false;
2254 store_info_t store_info = i_ptr->store_rec;
b8698a0f 2255
6fb5fa3c
DB
2256 /* Skip the clobbers. */
2257 while (!store_info->is_set)
2258 store_info = store_info->next;
b8698a0f 2259
6fb5fa3c
DB
2260 /* There are three cases here. */
2261 if (store_info->group_id < 0)
2262 /* We have a cselib store followed by a read from a
2263 const base. */
b8698a0f
L
2264 remove
2265 = canon_true_dependence (store_info->mem,
6fb5fa3c
DB
2266 GET_MODE (store_info->mem),
2267 store_info->mem_addr,
53d9622b 2268 mem, mem_addr);
b8698a0f 2269
6fb5fa3c
DB
2270 else if (group_id == store_info->group_id)
2271 {
2272 /* This is a block mode load. We may get lucky and
2273 canon_true_dependence may save the day. */
2274 if (width == -1)
b8698a0f
L
2275 remove
2276 = canon_true_dependence (store_info->mem,
6fb5fa3c
DB
2277 GET_MODE (store_info->mem),
2278 store_info->mem_addr,
53d9622b 2279 mem, mem_addr);
b8698a0f 2280
6fb5fa3c
DB
2281 /* If this read is just reading back something that we just
2282 stored, rewrite the read. */
b8698a0f 2283 else
6fb5fa3c
DB
2284 {
2285 if (store_info->rhs
8dd5516b
JJ
2286 && offset >= store_info->begin
2287 && offset + width <= store_info->end
2288 && all_positions_needed_p (store_info,
2289 offset - store_info->begin,
2290 width)
2291 && replace_read (store_info, i_ptr, read_info,
2292 insn_info, loc, bb_info->regs_live))
2293 return 0;
2294
6fb5fa3c
DB
2295 /* The bases are the same, just see if the offsets
2296 overlap. */
b8698a0f 2297 if ((offset < store_info->end)
6fb5fa3c
DB
2298 && (offset + width > store_info->begin))
2299 remove = true;
2300 }
2301 }
b8698a0f
L
2302
2303 /* else
6fb5fa3c
DB
2304 The else case that is missing here is that the
2305 bases are constant but different. There is nothing
2306 to do here because there is no overlap. */
b8698a0f 2307
6fb5fa3c
DB
2308 if (remove)
2309 {
2310 if (dump_file)
2311 dump_insn_info ("removing from active", i_ptr);
2312
dabd47e7 2313 active_local_stores_len--;
6fb5fa3c
DB
2314 if (last)
2315 last->next_local_store = i_ptr->next_local_store;
2316 else
2317 active_local_stores = i_ptr->next_local_store;
2318 }
2319 else
2320 last = i_ptr;
2321 i_ptr = i_ptr->next_local_store;
2322 }
2323 }
b8698a0f 2324 else
6fb5fa3c
DB
2325 {
2326 insn_info_t i_ptr = active_local_stores;
2327 insn_info_t last = NULL;
2328 if (dump_file)
2329 {
2330 fprintf (dump_file, " processing cselib load mem:");
2331 print_inline_rtx (dump_file, mem, 0);
2332 fprintf (dump_file, "\n");
2333 }
2334
2335 while (i_ptr)
2336 {
2337 bool remove = false;
2338 store_info_t store_info = i_ptr->store_rec;
b8698a0f 2339
6fb5fa3c
DB
2340 if (dump_file)
2341 fprintf (dump_file, " processing cselib load against insn %d\n",
2342 INSN_UID (i_ptr->insn));
2343
2344 /* Skip the clobbers. */
2345 while (!store_info->is_set)
2346 store_info = store_info->next;
2347
2348 /* If this read is just reading back something that we just
2349 stored, rewrite the read. */
2350 if (store_info->rhs
2351 && store_info->group_id == -1
2352 && store_info->cse_base == base
efc3527a 2353 && width != -1
8dd5516b
JJ
2354 && offset >= store_info->begin
2355 && offset + width <= store_info->end
2356 && all_positions_needed_p (store_info,
2357 offset - store_info->begin, width)
2358 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2359 bb_info->regs_live))
2360 return 0;
6fb5fa3c
DB
2361
2362 if (!store_info->alias_set)
b8698a0f 2363 remove = canon_true_dependence (store_info->mem,
6fb5fa3c
DB
2364 GET_MODE (store_info->mem),
2365 store_info->mem_addr,
53d9622b 2366 mem, mem_addr);
b8698a0f 2367
6fb5fa3c
DB
2368 if (remove)
2369 {
2370 if (dump_file)
2371 dump_insn_info ("removing from active", i_ptr);
b8698a0f 2372
dabd47e7 2373 active_local_stores_len--;
6fb5fa3c
DB
2374 if (last)
2375 last->next_local_store = i_ptr->next_local_store;
2376 else
2377 active_local_stores = i_ptr->next_local_store;
2378 }
2379 else
2380 last = i_ptr;
2381 i_ptr = i_ptr->next_local_store;
2382 }
2383 }
2384 return 0;
2385}
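
   The group_id >= 0 branch above decides whether an active store stays
   live using a half-open byte-range overlap test.  A minimal standalone
   sketch of that test (not part of dse.c; the ranges are invented):

/* Illustrative sketch only, not part of dse.c.  */
#include <stdio.h>
#include <stdbool.h>

/* Both ranges are [begin, end) in bytes from the same group base,
   mirroring the (offset < end) && (offset + width > begin) check.  */
static bool
ranges_overlap (int r_begin, int r_end, int s_begin, int s_end)
{
  return r_begin < s_end && r_end > s_begin;
}

int
main (void)
{
  /* A store to [0..8) survives a read of [8..12) but not one of [4..8).  */
  printf ("%d %d\n",
          ranges_overlap (8, 12, 0, 8),   /* 0: no overlap, store stays  */
          ranges_overlap (4, 8, 0, 8));   /* 1: overlap, store is killed */
  return 0;
}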
2386
b8698a0f 2387/* A for_each_rtx callback in which DATA is the bb_info, just as
6fb5fa3c
DB
 2388   for check_mem_read_rtx.  Nullify the pointer if i_m_r_m_r returns
 2389   true for any part of *LOC.  */
2390
2391static void
2392check_mem_read_use (rtx *loc, void *data)
2393{
2394 for_each_rtx (loc, check_mem_read_rtx, data);
2395}
2396
8dd5516b
JJ
2397
2398/* Get arguments passed to CALL_INSN. Return TRUE if successful.
2399 So far it only handles arguments passed in registers. */
2400
2401static bool
2402get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2403{
d5cc9181
JR
2404 CUMULATIVE_ARGS args_so_far_v;
2405 cumulative_args_t args_so_far;
8dd5516b
JJ
2406 tree arg;
2407 int idx;
2408
d5cc9181
JR
2409 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2410 args_so_far = pack_cumulative_args (&args_so_far_v);
8dd5516b
JJ
2411
2412 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2413 for (idx = 0;
2414 arg != void_list_node && idx < nargs;
2415 arg = TREE_CHAIN (arg), idx++)
2416 {
2417 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
3c07301f 2418 rtx reg, link, tmp;
d5cc9181 2419 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
8dd5516b
JJ
2420 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2421 || GET_MODE_CLASS (mode) != MODE_INT)
2422 return false;
2423
2424 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2425 link;
2426 link = XEXP (link, 1))
2427 if (GET_CODE (XEXP (link, 0)) == USE)
2428 {
2429 args[idx] = XEXP (XEXP (link, 0), 0);
2430 if (REG_P (args[idx])
2431 && REGNO (args[idx]) == REGNO (reg)
2432 && (GET_MODE (args[idx]) == mode
2433 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2434 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2435 <= UNITS_PER_WORD)
2436 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2437 > GET_MODE_SIZE (mode)))))
2438 break;
2439 }
2440 if (!link)
2441 return false;
2442
2443 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2444 if (GET_MODE (args[idx]) != mode)
2445 {
2446 if (!tmp || !CONST_INT_P (tmp))
2447 return false;
6d26322f 2448 tmp = gen_int_mode (INTVAL (tmp), mode);
8dd5516b
JJ
2449 }
2450 if (tmp)
2451 args[idx] = tmp;
2452
d5cc9181 2453 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
8dd5516b
JJ
2454 }
2455 if (arg != void_list_node || idx != nargs)
2456 return false;
2457 return true;
2458}
2459
9e582b1d
JR
2460/* Return a bitmap of the fixed registers contained in IN. */
2461
2462static bitmap
2463copy_fixed_regs (const_bitmap in)
2464{
2465 bitmap ret;
2466
2467 ret = ALLOC_REG_SET (NULL);
2468 bitmap_and (ret, in, fixed_reg_set_regset);
2469 return ret;
2470}
8dd5516b 2471
6fb5fa3c
DB
2472/* Apply record_store to all candidate stores in INSN. Mark INSN
2473 if some part of it is not a candidate store and assigns to a
2474 non-register target. */
2475
2476static void
2477scan_insn (bb_info_t bb_info, rtx insn)
2478{
2479 rtx body;
f883e0a7 2480 insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
6fb5fa3c
DB
2481 int mems_found = 0;
2482 memset (insn_info, 0, sizeof (struct insn_info));
2483
2484 if (dump_file)
2485 fprintf (dump_file, "\n**scanning insn=%d\n",
2486 INSN_UID (insn));
2487
2488 insn_info->prev_insn = bb_info->last_insn;
2489 insn_info->insn = insn;
2490 bb_info->last_insn = insn_info;
b8698a0f 2491
b5b8b0ac
AO
2492 if (DEBUG_INSN_P (insn))
2493 {
2494 insn_info->cannot_delete = true;
2495 return;
2496 }
6fb5fa3c 2497
0d52bcc1 2498 /* Cselib clears the table for this case, so we have to essentially
6fb5fa3c
DB
2499 do the same. */
2500 if (NONJUMP_INSN_P (insn)
2501 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2502 && MEM_VOLATILE_P (PATTERN (insn)))
2503 {
2504 add_wild_read (bb_info);
2505 insn_info->cannot_delete = true;
2506 return;
2507 }
2508
2509 /* Look at all of the uses in the insn. */
2510 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2511
2512 if (CALL_P (insn))
2513 {
8dd5516b
JJ
2514 bool const_call;
2515 tree memset_call = NULL_TREE;
2516
6fb5fa3c 2517 insn_info->cannot_delete = true;
50f0f366 2518
6fb5fa3c 2519 /* Const functions cannot do anything bad i.e. read memory,
50f0f366 2520 however, they can read their parameters which may have
8dd5516b
JJ
2521 been pushed onto the stack.
2522 memset and bzero don't read memory either. */
2523 const_call = RTL_CONST_CALL_P (insn);
2524 if (!const_call)
2525 {
2526 rtx call = PATTERN (insn);
2527 if (GET_CODE (call) == PARALLEL)
2528 call = XVECEXP (call, 0, 0);
2529 if (GET_CODE (call) == SET)
2530 call = SET_SRC (call);
2531 if (GET_CODE (call) == CALL
2532 && MEM_P (XEXP (call, 0))
2533 && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2534 {
2535 rtx symbol = XEXP (XEXP (call, 0), 0);
2536 if (SYMBOL_REF_DECL (symbol)
2537 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2538 {
2539 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2540 == BUILT_IN_NORMAL
2541 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2542 == BUILT_IN_MEMSET))
2543 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2544 memset_call = SYMBOL_REF_DECL (symbol);
2545 }
2546 }
2547 }
2548 if (const_call || memset_call)
6fb5fa3c
DB
2549 {
2550 insn_info_t i_ptr = active_local_stores;
2551 insn_info_t last = NULL;
2552
2553 if (dump_file)
8dd5516b
JJ
2554 fprintf (dump_file, "%s call %d\n",
2555 const_call ? "const" : "memset", INSN_UID (insn));
6fb5fa3c 2556
64520bdc
EB
2557 /* See the head comment of the frame_read field. */
2558 if (reload_completed)
2559 insn_info->frame_read = true;
2560
2561 /* Loop over the active stores and remove those which are
2562 killed by the const function call. */
6fb5fa3c
DB
2563 while (i_ptr)
2564 {
64520bdc
EB
2565 bool remove_store = false;
2566
2567 /* The stack pointer based stores are always killed. */
50f0f366 2568 if (i_ptr->stack_pointer_based)
64520bdc
EB
2569 remove_store = true;
2570
2571 /* If the frame is read, the frame related stores are killed. */
2572 else if (insn_info->frame_read)
2573 {
2574 store_info_t store_info = i_ptr->store_rec;
2575
2576 /* Skip the clobbers. */
2577 while (!store_info->is_set)
2578 store_info = store_info->next;
2579
2580 if (store_info->group_id >= 0
2581 && VEC_index (group_info_t, rtx_group_vec,
2582 store_info->group_id)->frame_related)
2583 remove_store = true;
2584 }
2585
2586 if (remove_store)
6fb5fa3c
DB
2587 {
2588 if (dump_file)
2589 dump_insn_info ("removing from active", i_ptr);
b8698a0f 2590
dabd47e7 2591 active_local_stores_len--;
6fb5fa3c
DB
2592 if (last)
2593 last->next_local_store = i_ptr->next_local_store;
2594 else
2595 active_local_stores = i_ptr->next_local_store;
2596 }
2597 else
2598 last = i_ptr;
64520bdc 2599
6fb5fa3c
DB
2600 i_ptr = i_ptr->next_local_store;
2601 }
8dd5516b
JJ
2602
2603 if (memset_call)
2604 {
2605 rtx args[3];
2606 if (get_call_args (insn, memset_call, args, 3)
2607 && CONST_INT_P (args[1])
2608 && CONST_INT_P (args[2])
2609 && INTVAL (args[2]) > 0)
2610 {
2611 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
f5541398 2612 set_mem_size (mem, INTVAL (args[2]));
8dd5516b
JJ
2613 body = gen_rtx_SET (VOIDmode, mem, args[1]);
2614 mems_found += record_store (body, bb_info);
2615 if (dump_file)
2616 fprintf (dump_file, "handling memset as BLKmode store\n");
2617 if (mems_found == 1)
2618 {
dabd47e7
JJ
2619 if (active_local_stores_len++
2620 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2621 {
2622 active_local_stores_len = 1;
2623 active_local_stores = NULL;
2624 }
9e582b1d
JR
2625 insn_info->fixed_regs_live
2626 = copy_fixed_regs (bb_info->regs_live);
8dd5516b
JJ
2627 insn_info->next_local_store = active_local_stores;
2628 active_local_stores = insn_info;
2629 }
2630 }
2631 }
6fb5fa3c
DB
2632 }
2633
50f0f366 2634 else
d26c7090
ER
2635 /* Every other call, including pure functions, may read any memory
2636 that is not relative to the frame. */
2637 add_non_frame_wild_read (bb_info);
50f0f366 2638
6fb5fa3c
DB
2639 return;
2640 }
2641
2642 /* Assuming that there are sets in these insns, we cannot delete
2643 them. */
2644 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
0a64eeca 2645 || volatile_refs_p (PATTERN (insn))
2da02156 2646 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
6fb5fa3c
DB
2647 || (RTX_FRAME_RELATED_P (insn))
2648 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2649 insn_info->cannot_delete = true;
b8698a0f 2650
6fb5fa3c
DB
2651 body = PATTERN (insn);
2652 if (GET_CODE (body) == PARALLEL)
2653 {
2654 int i;
2655 for (i = 0; i < XVECLEN (body, 0); i++)
2656 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2657 }
2658 else
2659 mems_found += record_store (body, bb_info);
2660
2661 if (dump_file)
b8698a0f 2662 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
6fb5fa3c
DB
2663 mems_found, insn_info->cannot_delete ? "true" : "false");
2664
8dd5516b
JJ
 2665  /* If we found exactly one store to memory, add the insn into active_local_stores so
2666 that it can be locally deleted if found dead or used for
2667 replace_read and redundant constant store elimination. Otherwise mark
2668 it as cannot delete. This simplifies the processing later. */
2669 if (mems_found == 1)
6fb5fa3c 2670 {
dabd47e7
JJ
2671 if (active_local_stores_len++
2672 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2673 {
2674 active_local_stores_len = 1;
2675 active_local_stores = NULL;
2676 }
9e582b1d 2677 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
6fb5fa3c
DB
2678 insn_info->next_local_store = active_local_stores;
2679 active_local_stores = insn_info;
2680 }
2681 else
2682 insn_info->cannot_delete = true;
2683}
2684
2685
2686/* Remove BASE from the set of active_local_stores. This is a
2687 callback from cselib that is used to get rid of the stores in
2688 active_local_stores. */
2689
2690static void
2691remove_useless_values (cselib_val *base)
2692{
2693 insn_info_t insn_info = active_local_stores;
2694 insn_info_t last = NULL;
2695
2696 while (insn_info)
2697 {
2698 store_info_t store_info = insn_info->store_rec;
60564289 2699 bool del = false;
6fb5fa3c
DB
2700
2701 /* If ANY of the store_infos match the cselib group that is
2702 being deleted, then the insn can not be deleted. */
2703 while (store_info)
2704 {
b8698a0f 2705 if ((store_info->group_id == -1)
6fb5fa3c
DB
2706 && (store_info->cse_base == base))
2707 {
60564289 2708 del = true;
6fb5fa3c
DB
2709 break;
2710 }
2711 store_info = store_info->next;
2712 }
2713
60564289 2714 if (del)
6fb5fa3c 2715 {
dabd47e7 2716 active_local_stores_len--;
6fb5fa3c
DB
2717 if (last)
2718 last->next_local_store = insn_info->next_local_store;
2719 else
2720 active_local_stores = insn_info->next_local_store;
2721 free_store_info (insn_info);
2722 }
2723 else
2724 last = insn_info;
b8698a0f 2725
6fb5fa3c
DB
2726 insn_info = insn_info->next_local_store;
2727 }
2728}
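
   remove_useless_values, like record_store and check_mem_read_rtx above,
   unlinks entries from active_local_stores while walking the list, keeping
   a "last" pointer so that either the predecessor or the list head can be
   patched.  A toy standalone version of the idiom (not part of dse.c; the
   node/uid/dead names are invented stand-ins for insn_info_t fields):

/* Illustrative sketch only, not part of dse.c.  */
#include <stdio.h>

struct node { int uid; int dead; struct node *next; };

static struct node *
prune (struct node *head)
{
  struct node *ptr = head, *last = NULL;
  while (ptr)
    {
      struct node *next = ptr->next;
      if (ptr->dead)
        {
          /* Splice PTR out: fix up either the predecessor or the head,
             mirroring the last/active_local_stores updates in dse.c.  */
          if (last)
            last->next = next;
          else
            head = next;
        }
      else
        last = ptr;
      ptr = next;
    }
  return head;
}

int
main (void)
{
  struct node c = { 3, 0, NULL }, b = { 2, 1, &c }, a = { 1, 0, &b };
  for (struct node *p = prune (&a); p; p = p->next)
    printf ("%d ", p->uid);     /* prints "1 3 " */
  printf ("\n");
  return 0;
}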
2729
2730
2731/* Do all of step 1. */
2732
2733static void
2734dse_step1 (void)
2735{
2736 basic_block bb;
3f9b14ff 2737 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
b8698a0f 2738
457eeaae 2739 cselib_init (0);
6fb5fa3c
DB
2740 all_blocks = BITMAP_ALLOC (NULL);
2741 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2742 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2743
2744 FOR_ALL_BB (bb)
2745 {
2746 insn_info_t ptr;
f883e0a7 2747 bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
6fb5fa3c
DB
2748
2749 memset (bb_info, 0, sizeof (struct bb_info));
2750 bitmap_set_bit (all_blocks, bb->index);
02b47899
KZ
2751 bb_info->regs_live = regs_live;
2752
2753 bitmap_copy (regs_live, DF_LR_IN (bb));
2754 df_simulate_initialize_forwards (bb, regs_live);
6fb5fa3c
DB
2755
2756 bb_table[bb->index] = bb_info;
2757 cselib_discard_hook = remove_useless_values;
2758
2759 if (bb->index >= NUM_FIXED_BLOCKS)
2760 {
2761 rtx insn;
2762
2763 cse_store_info_pool
b8698a0f 2764 = create_alloc_pool ("cse_store_info_pool",
6fb5fa3c
DB
2765 sizeof (struct store_info), 100);
2766 active_local_stores = NULL;
dabd47e7 2767 active_local_stores_len = 0;
6fb5fa3c 2768 cselib_clear_table ();
b8698a0f 2769
6fb5fa3c
DB
2770 /* Scan the insns. */
2771 FOR_BB_INSNS (bb, insn)
2772 {
2773 if (INSN_P (insn))
2774 scan_insn (bb_info, insn);
2775 cselib_process_insn (insn);
02b47899
KZ
2776 if (INSN_P (insn))
2777 df_simulate_one_insn_forwards (bb, insn, regs_live);
6fb5fa3c 2778 }
b8698a0f 2779
6fb5fa3c
DB
2780 /* This is something of a hack, because the global algorithm
2781 is supposed to take care of the case where stores go dead
2782 at the end of the function. However, the global
2783 algorithm must take a more conservative view of block
2784 mode reads than the local alg does. So to get the case
2785 where you have a store to the frame followed by a non
0d52bcc1 2786	     overlapping block mode read, we look at the active local
6fb5fa3c
DB
2787 stores at the end of the function and delete all of the
2788 frame and spill based ones. */
2789 if (stores_off_frame_dead_at_return
2790 && (EDGE_COUNT (bb->succs) == 0
2791 || (single_succ_p (bb)
2792 && single_succ (bb) == EXIT_BLOCK_PTR
e3b5732b 2793 && ! crtl->calls_eh_return)))
6fb5fa3c
DB
2794 {
2795 insn_info_t i_ptr = active_local_stores;
2796 while (i_ptr)
2797 {
2798 store_info_t store_info = i_ptr->store_rec;
2799
2800 /* Skip the clobbers. */
2801 while (!store_info->is_set)
2802 store_info = store_info->next;
8dd5516b 2803 if (store_info->alias_set && !i_ptr->cannot_delete)
6fb5fa3c 2804 delete_dead_store_insn (i_ptr);
b8698a0f 2805 else
6fb5fa3c
DB
2806 if (store_info->group_id >= 0)
2807 {
b8698a0f 2808 group_info_t group
6fb5fa3c 2809 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
8dd5516b 2810 if (group->frame_related && !i_ptr->cannot_delete)
6fb5fa3c
DB
2811 delete_dead_store_insn (i_ptr);
2812 }
2813
2814 i_ptr = i_ptr->next_local_store;
2815 }
2816 }
2817
2818 /* Get rid of the loads that were discovered in
2819 replace_read. Cselib is finished with this block. */
2820 while (deferred_change_list)
2821 {
2822 deferred_change_t next = deferred_change_list->next;
2823
2824 /* There is no reason to validate this change. That was
2825 done earlier. */
2826 *deferred_change_list->loc = deferred_change_list->reg;
2827 pool_free (deferred_change_pool, deferred_change_list);
2828 deferred_change_list = next;
2829 }
2830
2831 /* Get rid of all of the cselib based store_infos in this
2832 block and mark the containing insns as not being
2833 deletable. */
2834 ptr = bb_info->last_insn;
2835 while (ptr)
2836 {
2837 if (ptr->contains_cselib_groups)
8dd5516b
JJ
2838 {
2839 store_info_t s_info = ptr->store_rec;
2840 while (s_info && !s_info->is_set)
2841 s_info = s_info->next;
2842 if (s_info
2843 && s_info->redundant_reason
2844 && s_info->redundant_reason->insn
2845 && !ptr->cannot_delete)
2846 {
2847 if (dump_file)
2848 fprintf (dump_file, "Locally deleting insn %d "
2849 "because insn %d stores the "
2850 "same value and couldn't be "
2851 "eliminated\n",
2852 INSN_UID (ptr->insn),
2853 INSN_UID (s_info->redundant_reason->insn));
2854 delete_dead_store_insn (ptr);
2855 }
2856 if (s_info)
2857 s_info->redundant_reason = NULL;
2858 free_store_info (ptr);
2859 }
2860 else
2861 {
2862 store_info_t s_info;
2863
2864 /* Free at least positions_needed bitmaps. */
2865 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2866 if (s_info->is_large)
2867 {
dc491a25 2868 BITMAP_FREE (s_info->positions_needed.large.bmap);
8dd5516b
JJ
2869 s_info->is_large = false;
2870 }
2871 }
6fb5fa3c
DB
2872 ptr = ptr->prev_insn;
2873 }
2874
2875 free_alloc_pool (cse_store_info_pool);
2876 }
02b47899 2877 bb_info->regs_live = NULL;
6fb5fa3c
DB
2878 }
2879
02b47899 2880 BITMAP_FREE (regs_live);
6fb5fa3c
DB
2881 cselib_finish ();
2882 htab_empty (rtx_group_table);
2883}
2884
2885\f
2886/*----------------------------------------------------------------------------
2887 Second step.
2888
2889 Assign each byte position in the stores that we are going to
2890 analyze globally to a position in the bitmaps. Returns true if
6ed3da00 2891 there are any bit positions assigned.
6fb5fa3c
DB
2892----------------------------------------------------------------------------*/
2893
2894static void
2895dse_step2_init (void)
2896{
2897 unsigned int i;
2898 group_info_t group;
2899
ac47786e 2900 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
6fb5fa3c
DB
2901 {
2902 /* For all non stack related bases, we only consider a store to
2903 be deletable if there are two or more stores for that
2904 position. This is because it takes one store to make the
2905 other store redundant. However, for the stores that are
2906 stack related, we consider them if there is only one store
2907 for the position. We do this because the stack related
 2908	 stores can be deleted if there is no read between them and
2909 the end of the function.
b8698a0f 2910
6fb5fa3c
DB
2911 To make this work in the current framework, we take the stack
 2912	 related bases and add all of the bits from store1 into store2.
 2913	 This has the effect of making them eligible even if there is
2914 only one store. */
2915
2916 if (stores_off_frame_dead_at_return && group->frame_related)
2917 {
2918 bitmap_ior_into (group->store2_n, group->store1_n);
2919 bitmap_ior_into (group->store2_p, group->store1_p);
2920 if (dump_file)
b8698a0f 2921 fprintf (dump_file, "group %d is frame related ", i);
6fb5fa3c
DB
2922 }
2923
2924 group->offset_map_size_n++;
3f9b14ff
SB
2925 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2926 group->offset_map_size_n);
6fb5fa3c 2927 group->offset_map_size_p++;
3f9b14ff
SB
2928 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2929 group->offset_map_size_p);
6fb5fa3c
DB
2930 group->process_globally = false;
2931 if (dump_file)
2932 {
b8698a0f 2933 fprintf (dump_file, "group %d(%d+%d): ", i,
6fb5fa3c
DB
2934 (int)bitmap_count_bits (group->store2_n),
2935 (int)bitmap_count_bits (group->store2_p));
2936 bitmap_print (dump_file, group->store2_n, "n ", " ");
2937 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2938 }
2939 }
2940}
2941
2942
2943/* Init the offset tables for the normal case. */
2944
2945static bool
2946dse_step2_nospill (void)
2947{
2948 unsigned int i;
2949 group_info_t group;
2950 /* Position 0 is unused because 0 is used in the maps to mean
2951 unused. */
2952 current_position = 1;
ac47786e 2953 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
6fb5fa3c
DB
2954 {
2955 bitmap_iterator bi;
2956 unsigned int j;
2957
2958 if (group == clear_alias_group)
2959 continue;
2960
2961 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2962 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2963 bitmap_clear (group->group_kill);
2964
2965 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2966 {
2967 bitmap_set_bit (group->group_kill, current_position);
d26c7090
ER
2968 if (bitmap_bit_p (group->escaped_n, j))
2969 bitmap_set_bit (kill_on_calls, current_position);
6fb5fa3c
DB
2970 group->offset_map_n[j] = current_position++;
2971 group->process_globally = true;
2972 }
2973 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2974 {
b8698a0f 2975 bitmap_set_bit (group->group_kill, current_position);
d26c7090
ER
2976 if (bitmap_bit_p (group->escaped_p, j))
2977 bitmap_set_bit (kill_on_calls, current_position);
6fb5fa3c
DB
2978 group->offset_map_p[j] = current_position++;
2979 group->process_globally = true;
2980 }
2981 }
2982 return current_position != 1;
2983}
2984
2985
2986/* Init the offset tables for the spill case. */
2987
2988static bool
2989dse_step2_spill (void)
2990{
2991 unsigned int j;
2992 group_info_t group = clear_alias_group;
2993 bitmap_iterator bi;
2994
2995 /* Position 0 is unused because 0 is used in the maps to mean
2996 unused. */
2997 current_position = 1;
2998
2999 if (dump_file)
3000 {
b8698a0f 3001 bitmap_print (dump_file, clear_alias_sets,
6fb5fa3c 3002 "clear alias sets ", "\n");
b8698a0f 3003 bitmap_print (dump_file, disqualified_clear_alias_sets,
6fb5fa3c
DB
3004 "disqualified clear alias sets ", "\n");
3005 }
3006
3007 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
3008 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
3009 bitmap_clear (group->group_kill);
b8698a0f 3010
6fb5fa3c
DB
3011 /* Remove the disqualified positions from the store2_p set. */
3012 bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
b8698a0f 3013
6fb5fa3c
DB
3014 /* We do not need to process the store2_n set because
3015 alias_sets are always positive. */
3016 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3017 {
b8698a0f 3018 bitmap_set_bit (group->group_kill, current_position);
6fb5fa3c
DB
3019 group->offset_map_p[j] = current_position++;
3020 group->process_globally = true;
3021 }
3022
3023 return current_position != 1;
3024}
3025
3026
3027\f
3028/*----------------------------------------------------------------------------
3029 Third step.
b8698a0f 3030
6fb5fa3c
DB
3031 Build the bit vectors for the transfer functions.
3032----------------------------------------------------------------------------*/
3033
3034
6fb5fa3c
DB
3035/* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3036 there, return 0. */
3037
3038static int
3039get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
3040{
3041 if (offset < 0)
3042 {
3043 HOST_WIDE_INT offset_p = -offset;
3044 if (offset_p >= group_info->offset_map_size_n)
3045 return 0;
3046 return group_info->offset_map_n[offset_p];
3047 }
3048 else
3049 {
3050 if (offset >= group_info->offset_map_size_p)
3051 return 0;
3052 return group_info->offset_map_p[offset];
3053 }
3054}
3055
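
   get_bitmap_index splits offsets into two maps: negative offsets index
   offset_map_n by their absolute value, non-negative offsets index
   offset_map_p directly, and anything out of range comes back as 0, the
   reserved "unused" position.  A standalone sketch with toy arrays (not
   part of dse.c; the maps and bit positions are invented):

/* Illustrative sketch only, not part of dse.c.  */
#include <stdio.h>

static int
toy_get_bitmap_index (const int *map_n, int size_n,
                      const int *map_p, int size_p, int offset)
{
  if (offset < 0)
    {
      int offset_p = -offset;
      if (offset_p >= size_n)
        return 0;
      return map_n[offset_p];
    }
  if (offset >= size_p)
    return 0;
  return map_p[offset];
}

int
main (void)
{
  /* Bit positions as a hypothetical dse_step2_nospill run might assign
     them; position 0 is reserved to mean "not tracked globally".  */
  int map_n[4] = { 0, 3, 4, 0 };   /* offsets -1 and -2 tracked */
  int map_p[4] = { 1, 2, 0, 0 };   /* offsets 0 and 1 tracked   */

  printf ("%d %d %d %d\n",
          toy_get_bitmap_index (map_n, 4, map_p, 4, -2),  /* 4 */
          toy_get_bitmap_index (map_n, 4, map_p, 4, 0),   /* 1 */
          toy_get_bitmap_index (map_n, 4, map_p, 4, 5),   /* 0 */
          toy_get_bitmap_index (map_n, 4, map_p, 4, -9)); /* 0 */
  return 0;
}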
3056
 3057/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
3058 may be NULL. */
3059
b8698a0f 3060static void
6fb5fa3c
DB
3061scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3062{
3063 while (store_info)
3064 {
3065 HOST_WIDE_INT i;
b8698a0f 3066 group_info_t group_info
6fb5fa3c
DB
3067 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
3068 if (group_info->process_globally)
3069 for (i = store_info->begin; i < store_info->end; i++)
3070 {
3071 int index = get_bitmap_index (group_info, i);
3072 if (index != 0)
3073 {
3074 bitmap_set_bit (gen, index);
3075 if (kill)
3076 bitmap_clear_bit (kill, index);
3077 }
3078 }
3079 store_info = store_info->next;
3080 }
3081}
3082
3083
 3084/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
3085 may be NULL. */
3086
b8698a0f 3087static void
6fb5fa3c
DB
3088scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3089{
3090 while (store_info)
3091 {
3092 if (store_info->alias_set)
3093 {
b8698a0f 3094 int index = get_bitmap_index (clear_alias_group,
6fb5fa3c
DB
3095 store_info->alias_set);
3096 if (index != 0)
3097 {
3098 bitmap_set_bit (gen, index);
3099 if (kill)
3100 bitmap_clear_bit (kill, index);
3101 }
3102 }
3103 store_info = store_info->next;
3104 }
3105}
3106
3107
 3108/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
3109 may be NULL. */
3110
3111static void
3112scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3113{
3114 read_info_t read_info = insn_info->read_rec;
3115 int i;
3116 group_info_t group;
3117
64520bdc
EB
3118 /* If this insn reads the frame, kill all the frame related stores. */
3119 if (insn_info->frame_read)
3120 {
ac47786e 3121 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
64520bdc
EB
3122 if (group->process_globally && group->frame_related)
3123 {
3124 if (kill)
3125 bitmap_ior_into (kill, group->group_kill);
b8698a0f 3126 bitmap_and_compl_into (gen, group->group_kill);
64520bdc
EB
3127 }
3128 }
d26c7090
ER
3129 if (insn_info->non_frame_wild_read)
3130 {
3131 /* Kill all non-frame related stores. Kill all stores of variables that
3132 escape. */
3133 if (kill)
3134 bitmap_ior_into (kill, kill_on_calls);
3135 bitmap_and_compl_into (gen, kill_on_calls);
3136 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
3137 if (group->process_globally && !group->frame_related)
3138 {
3139 if (kill)
3140 bitmap_ior_into (kill, group->group_kill);
3141 bitmap_and_compl_into (gen, group->group_kill);
3142 }
3143 }
6fb5fa3c
DB
3144 while (read_info)
3145 {
ac47786e 3146 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
6fb5fa3c
DB
3147 {
3148 if (group->process_globally)
3149 {
3150 if (i == read_info->group_id)
3151 {
3152 if (read_info->begin > read_info->end)
3153 {
3154 /* Begin > end for block mode reads. */
3155 if (kill)
3156 bitmap_ior_into (kill, group->group_kill);
3157 bitmap_and_compl_into (gen, group->group_kill);
3158 }
3159 else
3160 {
3161 /* The groups are the same, just process the
3162 offsets. */
3163 HOST_WIDE_INT j;
3164 for (j = read_info->begin; j < read_info->end; j++)
3165 {
3166 int index = get_bitmap_index (group, j);
3167 if (index != 0)
3168 {
3169 if (kill)
3170 bitmap_set_bit (kill, index);
3171 bitmap_clear_bit (gen, index);
3172 }
3173 }
3174 }
3175 }
3176 else
3177 {
3178 /* The groups are different, if the alias sets
3179 conflict, clear the entire group. We only need
3180 to apply this test if the read_info is a cselib
3181 read. Anything with a constant base cannot alias
3182 something else with a different constant
3183 base. */
3184 if ((read_info->group_id < 0)
b8698a0f 3185 && canon_true_dependence (group->base_mem,
d32f725a 3186 GET_MODE (group->base_mem),
6216f94e 3187 group->canon_base_addr,
53d9622b 3188 read_info->mem, NULL_RTX))
6fb5fa3c
DB
3189 {
3190 if (kill)
3191 bitmap_ior_into (kill, group->group_kill);
3192 bitmap_and_compl_into (gen, group->group_kill);
3193 }
3194 }
3195 }
3196 }
b8698a0f 3197
6fb5fa3c
DB
3198 read_info = read_info->next;
3199 }
3200}
3201
 3202/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
3203 may be NULL. */
3204
3205static void
3206scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3207{
3208 while (read_info)
3209 {
3210 if (read_info->alias_set)
3211 {
b8698a0f 3212 int index = get_bitmap_index (clear_alias_group,
6fb5fa3c
DB
3213 read_info->alias_set);
3214 if (index != 0)
3215 {
3216 if (kill)
3217 bitmap_set_bit (kill, index);
3218 bitmap_clear_bit (gen, index);
3219 }
3220 }
b8698a0f 3221
6fb5fa3c
DB
3222 read_info = read_info->next;
3223 }
3224}
3225
3226
3227/* Return the insn in BB_INFO before the first wild read or if there
3228 are no wild reads in the block, return the last insn. */
3229
3230static insn_info_t
3231find_insn_before_first_wild_read (bb_info_t bb_info)
3232{
3233 insn_info_t insn_info = bb_info->last_insn;
3234 insn_info_t last_wild_read = NULL;
3235
3236 while (insn_info)
3237 {
3238 if (insn_info->wild_read)
3239 {
3240 last_wild_read = insn_info->prev_insn;
3241 /* Block starts with wild read. */
3242 if (!last_wild_read)
3243 return NULL;
3244 }
3245
3246 insn_info = insn_info->prev_insn;
3247 }
3248
3249 if (last_wild_read)
3250 return last_wild_read;
3251 else
3252 return bb_info->last_insn;
3253}
3254
3255
3256/* Scan the insns in BB_INFO starting at PTR and going to the top of
3257 the block in order to build the gen and kill sets for the block.
3258 We start at ptr which may be the last insn in the block or may be
3259 the first insn with a wild read. In the latter case we are able to
3260 skip the rest of the block because it just does not matter:
3261 anything that happens is hidden by the wild read. */
3262
3263static void
3264dse_step3_scan (bool for_spills, basic_block bb)
3265{
3266 bb_info_t bb_info = bb_table[bb->index];
3267 insn_info_t insn_info;
3268
3269 if (for_spills)
3270 /* There are no wild reads in the spill case. */
3271 insn_info = bb_info->last_insn;
3272 else
3273 insn_info = find_insn_before_first_wild_read (bb_info);
b8698a0f 3274
6fb5fa3c
DB
3275 /* In the spill case or in the no_spill case if there is no wild
3276 read in the block, we will need a kill set. */
3277 if (insn_info == bb_info->last_insn)
3278 {
3279 if (bb_info->kill)
3280 bitmap_clear (bb_info->kill);
3281 else
3f9b14ff 3282 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c 3283 }
b8698a0f 3284 else
6fb5fa3c
DB
3285 if (bb_info->kill)
3286 BITMAP_FREE (bb_info->kill);
3287
3288 while (insn_info)
3289 {
3290 /* There may have been code deleted by the dce pass run before
3291 this phase. */
3292 if (insn_info->insn && INSN_P (insn_info->insn))
3293 {
b8698a0f 3294 /* Process the read(s) last. */
6fb5fa3c
DB
3295 if (for_spills)
3296 {
3297 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3298 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3299 }
3300 else
3301 {
3302 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3303 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3304 }
b8698a0f 3305 }
6fb5fa3c
DB
3306
3307 insn_info = insn_info->prev_insn;
3308 }
3309}
3310
3311
3312/* Set the gen set of the exit block, and also any block with no
3313 successors that does not have a wild read. */
3314
3315static void
3316dse_step3_exit_block_scan (bb_info_t bb_info)
3317{
3318 /* The gen set is all 0's for the exit block except for the
3319 frame_pointer_group. */
b8698a0f 3320
6fb5fa3c
DB
3321 if (stores_off_frame_dead_at_return)
3322 {
3323 unsigned int i;
3324 group_info_t group;
b8698a0f 3325
ac47786e 3326 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
6fb5fa3c
DB
3327 {
3328 if (group->process_globally && group->frame_related)
3329 bitmap_ior_into (bb_info->gen, group->group_kill);
3330 }
3331 }
3332}
3333
3334
3335/* Find all of the blocks that are not backwards reachable from the
3336 exit block or any block with no successors (BB). These are the
3337 infinite loops or infinite self loops. These blocks will still
3338 have their bits set in UNREACHABLE_BLOCKS. */
3339
3340static void
3341mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3342{
3343 edge e;
3344 edge_iterator ei;
3345
3346 if (TEST_BIT (unreachable_blocks, bb->index))
3347 {
3348 RESET_BIT (unreachable_blocks, bb->index);
3349 FOR_EACH_EDGE (e, ei, bb->preds)
b8698a0f 3350 {
6fb5fa3c 3351 mark_reachable_blocks (unreachable_blocks, e->src);
b8698a0f 3352 }
6fb5fa3c
DB
3353 }
3354}
3355
3356/* Build the transfer functions for the function. */

static void
dse_step3 (bool for_spills)
{
  basic_block bb;
  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  sbitmap_ones (unreachable_blocks);

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
        bitmap_clear (bb_info->gen);
      else
        bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);

      if (bb->index == ENTRY_BLOCK)
        ;
      else if (bb->index == EXIT_BLOCK)
        dse_step3_exit_block_scan (bb_info);
      else
        dse_step3_scan (for_spills, bb);
      if (EDGE_COUNT (bb->succs) == 0)
        mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
         sets.  */
      if (bb_info->in)
        BITMAP_FREE (bb_info->in);
      if (bb_info->out)
        BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice.  However, it is common in regression tests.  */
  EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
        {
          bb_info_t bb_info = bb_table[i];
          if (!all_ones)
            {
              unsigned int j;
              group_info_t group;

              all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
              FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
                bitmap_ior_into (all_ones, group->group_kill);
            }
          if (!bb_info->out)
            {
              bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
              bitmap_copy (bb_info->out, all_ones);
            }
        }
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
  sbitmap_free (unreachable_blocks);
}


\f
/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/


/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */
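/* In effect, "no successors" is treated as an implicit edge to the exit
   block: the out set is created once as a copy of the exit block's gen
   set and is never recomputed here.  */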

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}

/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */
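/* The confluence operation is intersection: the in set of each
   successor is and'ed into the predecessor's out set.  Because a
   missing set stands for the universal set, the first in set that
   becomes available is simply copied.  */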

static bool
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
        bitmap_and_into (src_info->out, dest_info->in);
      else
        {
          src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (src_info->out, dest_info->in);
        }
    }
  return true;
}


/* Propagate the info from the out to the in set of BB_INDEX's basic
   block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is, none of
   the info can reach the top.  The only thing that reaches the top is
   the gen set and we just copy the set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created and
   it is better to wait than to create a set of ones.

   3) There is both a kill and out set.  We apply the obvious transfer
   function.
*/
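/* In bitmap terms, case 3 is the usual backward dataflow equation

       in = gen | (out & ~kill)

   which is exactly what bitmap_ior_and_compl computes below.  As a
   made-up illustration (not from any test case): gen = {5},
   kill = {5, 6}, out = {6, 7} gives in = {5, 7}.  */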

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
        {
          /* Case 3 above.  */
          if (bb_info->in)
            return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                         bb_info->out, bb_info->kill);
          else
            {
              bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
              bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                    bb_info->out, bb_info->kill);
              return true;
            }
        }
      else
        /* Case 2 above.  */
        return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
         happens.  */
      if (bb_info->in)
        return false;
      else
        {
          bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (bb_info->in, bb_info->gen);
          return true;
        }
    }
}

/* Solve the dataflow equations.  */
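/* This hands the confluence and transfer functions above to df's
   generic solver, which essentially iterates them over the blocks in
   the backward postorder until no in set changes; that is why
   dse_transfer_function reports whether it changed anything.  */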

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
                      dse_confluence_n, dse_transfer_function,
                      all_blocks, df_get_postorder (DF_BACKWARD),
                      df_get_n_blocks (DF_BACKWARD));
  if (dump_file)
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB (bb)
        {
          bb_info_t bb_info = bb_table[bb->index];

          df_print_bb_index (bb, dump_file);
          if (bb_info->in)
            bitmap_print (dump_file, bb_info->in, " in: ", "\n");
          else
            fprintf (dump_file, " in: *MISSING*\n");
          if (bb_info->gen)
            bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
          else
            fprintf (dump_file, " gen: *MISSING*\n");
          if (bb_info->kill)
            bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
          else
            fprintf (dump_file, " kill: *MISSING*\n");
          if (bb_info->out)
            bitmap_print (dump_file, bb_info->out, " out: ", "\n");
          else
            fprintf (dump_file, " out: *MISSING*\n\n");
        }
    }
}


\f
/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/
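/* Walk each block's insns backwards, carrying the block's out set in V.
   A store may be deleted only if every position it writes still has its
   bit set in V; insns that survive update V through the same
   scan_stores/scan_reads routines used when building the transfer
   functions.  */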


static void
dse_step5_nospill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          if (dump_file && insn_info->insn)
            {
              fprintf (dump_file, "starting to process insn %d\n",
                       INSN_UID (insn_info->insn));
              bitmap_print (dump_file, v, " v: ", "\n");
            }

          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              store_info_t store_info = insn_info->store_rec;

              /* Try to delete the current insn.  */
              deleted = true;

              /* Skip the clobbers.  */
              while (!store_info->is_set)
                store_info = store_info->next;

              if (store_info->alias_set)
                deleted = false;
              else
                {
                  HOST_WIDE_INT i;
                  group_info_t group_info
                    = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);

                  for (i = store_info->begin; i < store_info->end; i++)
                    {
                      int index = get_bitmap_index (group_info, i);

                      if (dump_file)
                        fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          if (dump_file)
                            fprintf (dump_file, "failing at i = %d\n", (int)i);
                          deleted = false;
                          break;
                        }
                    }
                }
              if (deleted)
                {
                  if (dbg_cnt (dse)
                      && check_for_inc_dec_1 (insn_info))
                    {
                      delete_insn (insn_info->insn);
                      insn_info->insn = NULL;
                      globally_deleted++;
                    }
                }
            }
          /* We do not want to process the local info if the insn was
             deleted.  For instance, if the insn did a wild read, we
             no longer need to trash the info.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores_nospill (insn_info->store_rec, v, NULL);
              if (insn_info->wild_read)
                {
                  if (dump_file)
                    fprintf (dump_file, "wild read\n");
                  bitmap_clear (v);
                }
              else if (insn_info->read_rec
                       || insn_info->non_frame_wild_read)
                {
                  if (dump_file && !insn_info->non_frame_wild_read)
                    fprintf (dump_file, "regular read\n");
                  else if (dump_file)
                    fprintf (dump_file, "non-frame wild read\n");
                  scan_reads_nospill (insn_info, v, NULL);
                }
            }

          insn_info = insn_info->prev_insn;
        }
    }
}
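
/* The spill variant of the function above: an insn can be deleted only
   if every one of its store records carries an alias set whose bit is
   still set in V, looked up through clear_alias_group rather than the
   ordinary rtx groups.  */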


static void
dse_step5_spill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              /* Try to delete the current insn.  */
              store_info_t store_info = insn_info->store_rec;
              deleted = true;

              while (store_info)
                {
                  if (store_info->alias_set)
                    {
                      int index = get_bitmap_index (clear_alias_group,
                                                    store_info->alias_set);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          deleted = false;
                          break;
                        }
                    }
                  else
                    deleted = false;
                  store_info = store_info->next;
                }
              if (deleted && dbg_cnt (dse)
                  && check_for_inc_dec_1 (insn_info))
                {
                  if (dump_file)
                    fprintf (dump_file, "Spill deleting insn %d\n",
                             INSN_UID (insn_info->insn));
                  delete_insn (insn_info->insn);
                  spill_deleted++;
                  insn_info->insn = NULL;
                }
            }

          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores_spill (insn_info->store_rec, v, NULL);
              scan_reads_spill (insn_info->read_rec, v, NULL);
            }

          insn_info = insn_info->prev_insn;
        }
    }
}


\f
/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/
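/* This relies on the redundant_reason links recorded during the earlier
   per-insn scan: a store whose value was shown to equal that of an
   earlier, still-surviving store is deleted here even though the
   earlier store itself could not be removed.  */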

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info_t s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx rinsn = s_info->redundant_reason->insn;
                  if (dump_file)
                    fprintf (dump_file, "Locally deleting insn %d "
                                        "because insn %d stores the "
                                        "same value and couldn't be "
                                        "eliminated\n",
                             INSN_UID (insn_info->insn),
                             INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}
\f
/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  free (bb_table);
  htab_delete (rtx_group_table);
  VEC_free (group_info_t, heap, rtx_group_vec);
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}


/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */
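/* The driver simply runs the numbered steps in order.  Step 2 decides,
   separately for the ordinary case and (when clear_alias_sets exist,
   i.e. after reload) for the spill case, whether the global problem is
   worth setting up; when it is, dead code elimination is requested from
   df (DF_LR_RUN_DCE) before steps 3-5 build, solve and apply the
   dataflow equations.  Steps 6 and 7 always run.  */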

static unsigned int
rest_of_handle_dse (void)
{
  bool did_global = false;

  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      did_global = true;
      if (dump_file)
        fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  /* For the instance of dse that runs after reload, we make a special
     pass to process the spills.  These are special in that they are
     totally transparent, i.e., there are no aliasing issues that need
     to be considered.  This means that the wild reads that kill
     everything else do not apply here.  */
  if (clear_alias_sets && dse_step2_spill ())
    {
      if (!did_global)
        {
          df_set_flags (DF_LR_RUN_DCE);
          df_analyze ();
        }
      did_global = true;
      if (dump_file)
        fprintf (dump_file, "doing global spill processing\n");
      dse_step3 (true);
      dse_step4 ();
      dse_step5_spill ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
             locally_deleted, globally_deleted, spill_deleted);
  return 0;
}

static bool
gate_dse1 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse1);
}

static bool
gate_dse2 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse2);
}

struct rtl_opt_pass pass_rtl_dse1 =
{
 {
  RTL_PASS,
  "dse1",                               /* name */
  gate_dse1,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE1,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_rtl_dse2 =
{
 {
  RTL_PASS,
  "dse2",                               /* name */
  gate_dse2,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};