gcc/dse.c
6fb5fa3c 1/* RTL dead store elimination.
cbe34bb5 2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
6fb5fa3c
DB
3
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
9dcd6f09 11Software Foundation; either version 3, or (at your option) any later
6fb5fa3c
DB
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
9dcd6f09
NC
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
6fb5fa3c
DB
22
23#undef BASELINE
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
c7131fb2 28#include "backend.h"
957060b5
AM
29#include "target.h"
30#include "rtl.h"
c7131fb2
AM
31#include "tree.h"
32#include "gimple.h"
957060b5 33#include "predict.h"
c7131fb2 34#include "df.h"
4d0cdd0c 35#include "memmodel.h"
957060b5
AM
36#include "tm_p.h"
37#include "gimple-ssa.h"
38#include "expmed.h"
39#include "optabs.h"
957060b5
AM
40#include "emit-rtl.h"
41#include "recog.h"
40e23961 42#include "alias.h"
d8a2d370 43#include "stor-layout.h"
60393bbc 44#include "cfgrtl.h"
6fb5fa3c 45#include "cselib.h"
6fb5fa3c 46#include "tree-pass.h"
36566b39 47#include "explow.h"
6fb5fa3c 48#include "expr.h"
6fb5fa3c 49#include "dbgcnt.h"
dabd47e7 50#include "params.h"
d7111da8 51#include "rtl-iter.h"
6c5ed3f1 52#include "cfgcleanup.h"
6fb5fa3c
DB
53
54/* This file contains three techniques for performing Dead Store
b8698a0f 55 Elimination (dse).
6fb5fa3c
DB
56
57 * The first technique performs dse locally on any base address. It
 58 is based on cselib, which is a local value numbering technique.
 59 This technique is local to a basic block but deals with fairly
 60 general addresses.
b8698a0f 61
6fb5fa3c
DB
62 * The second technique performs dse globally but is restricted to
63 base addresses that are either constant or are relative to the
64 frame_pointer.
65
66 * The third technique, (which is only done after register allocation)
026c3cfd 67 processes the spill slots. This differs from the second
6fb5fa3c
DB
68 technique because it takes advantage of the fact that spilling is
69 completely free from the effects of aliasing.
70
71 Logically, dse is a backwards dataflow problem. A store can be
 72 deleted if it cannot be reached in the backward direction by any
73 use of the value being stored. However, the local technique uses a
74 forwards scan of the basic block because cselib requires that the
75 block be processed in that order.
76
77 The pass is logically broken into 7 steps:
78
79 0) Initialization.
80
81 1) The local algorithm, as well as scanning the insns for the two
82 global algorithms.
83
84 2) Analysis to see if the global algs are necessary. In the case
 85 of stores based on a constant address, there must be at least two
86 stores to that address, to make it possible to delete some of the
87 stores. In the case of stores off of the frame or spill related
88 stores, only one store to an address is necessary because those
89 stores die at the end of the function.
90
b8698a0f 91 3) Set up the global dataflow equations based on processing the
6fb5fa3c
DB
92 info parsed in the first step.
93
94 4) Solve the dataflow equations.
95
96 5) Delete the insns that the global analysis has indicated are
97 unnecessary.
98
073a8998 99 6) Delete insns that store the same value as a preceding store
8dd5516b
JJ
100 where the earlier store couldn't be eliminated.
101
102 7) Cleanup.
6fb5fa3c
DB
103
 104 The first step uses cselib and canon_rtx to build the largest expression
105 possible for each address. This pass is a forwards pass through
106 each basic block. From the point of view of the global technique,
107 the first pass could examine a block in either direction. The
0d52bcc1 108 forwards ordering is to accommodate cselib.
6fb5fa3c 109
5ef0b50d 110 We make a simplifying assumption: addresses fall into four broad
6fb5fa3c
DB
111 categories:
112
113 1) base has rtx_varies_p == false, offset is constant.
114 2) base has rtx_varies_p == false, offset variable.
115 3) base has rtx_varies_p == true, offset constant.
116 4) base has rtx_varies_p == true, offset variable.
117
118 The local passes are able to process all 4 kinds of addresses. The
5ef0b50d 119 global pass only handles 1).
6fb5fa3c
DB
120
121 The global problem is formulated as follows:
122
123 A store, S1, to address A, where A is not relative to the stack
124 frame, can be eliminated if all paths from S1 to the end of the
5ef0b50d 125 function contain another store to A before a read to A.
6fb5fa3c
DB
126
127 If the address A is relative to the stack frame, a store S2 to A
5ef0b50d 128 can be eliminated if there are no paths from S2 that reach the
6fb5fa3c 129 end of the function that read A before another store to A. In
5ef0b50d 130 this case S2 can be deleted if there are paths from S2 to the
6fb5fa3c
DB
131 end of the function that have no reads or writes to A. This
132 second case allows stores to the stack frame to be deleted that
133 would otherwise die when the function returns. This cannot be
134 done if stores_off_frame_dead_at_return is not true. See the doc
135 for that variable for when this variable is false.
136
137 The global problem is formulated as a backwards set union
138 dataflow problem where the stores are the gens and reads are the
139 kills. Set union problems are rare and require some special
140 handling given our representation of bitmaps. A straightforward
5ef0b50d 141 implementation requires a lot of bitmaps filled with 1s.
6fb5fa3c
DB
142 These are expensive and cumbersome in our bitmap formulation so
143 care has been taken to avoid large vectors filled with 1s. See
144 the comments in bb_info and in the dataflow confluence functions
b8698a0f 145 for details.
6fb5fa3c
DB
146
147 There are two places for further enhancements to this algorithm:
b8698a0f 148
6fb5fa3c
DB
149 1) The original dse which was embedded in a pass called flow also
150 did local address forwarding. For example in
151
152 A <- r100
153 ... <- A
154
155 flow would replace the right hand side of the second insn with a
6ed3da00 156 reference to r100. Most of the information is available to add this
6fb5fa3c
DB
 157 to this pass. This has not been done because it is a lot of work in
158 the case that either r100 is assigned to between the first and
159 second insn and/or the second insn is a load of part of the value
160 stored by the first insn.
161
162 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
163 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
164 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
165 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
166
167 2) The cleaning up of spill code is quite profitable. It currently
168 depends on reading tea leaves and chicken entrails left by reload.
169 This pass depends on reload creating a singleton alias set for each
170 spill slot and telling the next dse pass which of these alias sets
171 are the singletons. Rather than analyze the addresses of the
172 spills, dse's spill processing just does analysis of the loads and
173 stores that use those alias sets. There are three cases where this
174 falls short:
175
176 a) Reload sometimes creates the slot for one mode of access, and
177 then inserts loads and/or stores for a smaller mode. In this
178 case, the current code just punts on the slot. The proper thing
179 to do is to back out and use one bit vector position for each
180 byte of the entity associated with the slot. This depends on
181 KNOWING that reload always generates the accesses for each of the
 182 bytes in some canonical (read: easy to understand several
183 passes after reload happens) way.
184
 185 b) Reload sometimes decides that the spill slot it allocated was not
186 large enough for the mode and goes back and allocates more slots
187 with the same mode and alias set. The backout in this case is a
188 little more graceful than (a). In this case the slot is unmarked
 189 as being a spill slot and if the final address comes out to be based
b8698a0f 190 off the frame pointer, the global algorithm handles this slot.
6fb5fa3c
DB
191
192 c) For any pass that may prespill, there is currently no
193 mechanism to tell the dse pass that the slot being used has the
194 special properties that reload uses. It may be that all that is
0d52bcc1 195 required is to have those passes make the same calls that reload
6fb5fa3c
DB
196 does, assuming that the alias sets can be manipulated in the same
197 way. */
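
/* Editor's addition -- a minimal illustrative sketch, not part of the
   original sources: the kind of source-level pattern whose RTL the passes
   below clean up.  The first store to t.i is dead because it is
   overwritten on every path before it can be read; the local algorithm
   catches this within a block, and the global algorithm catches the
   cross-block equivalent for constant or frame-relative bases.  */
#if 0
struct dse_example_s { int i; int j; };

static int
dse_example (int a, int b)
{
  struct dse_example_s t;
  t.i = a;	/* dead store: shadowed by the store below */
  t.i = b;	/* this store reaches the read */
  return t.i;
}
#endif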
198
199/* There are limits to the size of constant offsets we model for the
 200 global problem. There are certainly test cases that exceed this
 201 limit; however, it is unlikely that there are important programs
202 that really have constant offsets this size. */
203#define MAX_OFFSET (64 * 1024)
204
3f9b14ff
SB
205/* Obstack for the DSE dataflow bitmaps. We don't want to put these
206 on the default obstack because these bitmaps can grow quite large
207 (~2GB for the small (!) test case of PR54146) and we'll hold on to
208 all that memory until the end of the compiler run.
209 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
210 releasing the whole obstack. */
211static bitmap_obstack dse_bitmap_obstack;
212
213/* Obstack for other data. As for above: Kinda nice to be able to
214 throw it all away at the end in one big sweep. */
215static struct obstack dse_obstack;
216
217/* Scratch bitmap for cselib's cselib_expand_value_rtx. */
6fb5fa3c 218static bitmap scratch = NULL;
3f9b14ff 219
da6603c6 220struct insn_info_type;
6fb5fa3c
DB
221
222/* This structure holds information about a candidate store. */
b8698a0f 223struct store_info
6fb5fa3c
DB
224{
225
226 /* False means this is a clobber. */
227 bool is_set;
228
8dd5516b
JJ
229 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
230 bool is_large;
231
6fb5fa3c
DB
232 /* The id of the mem group of the base address. If rtx_varies_p is
233 true, this is -1. Otherwise, it is the index into the group
234 table. */
235 int group_id;
b8698a0f 236
6fb5fa3c
DB
237 /* This is the cselib value. */
238 cselib_val *cse_base;
239
 240 /* The canonized mem. */
241 rtx mem;
242
6216f94e 243 /* Canonized MEM address for use by canon_true_dependence. */
6fb5fa3c
DB
244 rtx mem_addr;
245
71d6a386
RS
246 /* The offset of the first byte associated with the operation. */
247 HOST_WIDE_INT offset;
248
249 /* The number of bytes covered by the operation. This is always exact
250 and known (rather than -1). */
251 HOST_WIDE_INT width;
8dd5516b
JJ
252
253 union
254 {
255 /* A bitmask as wide as the number of bytes in the word that
256 contains a 1 if the byte may be needed. The store is unused if
257 all of the bits are 0. This is used if IS_LARGE is false. */
258 unsigned HOST_WIDE_INT small_bitmask;
259
260 struct
261 {
 262 /* A bitmap with one bit per byte. A cleared bit means the position
 263 is needed. Used if IS_LARGE is true. */
dc491a25 264 bitmap bmap;
6fb5fa3c 265
8dd5516b 266 /* Number of set bits (i.e. unneeded bytes) in BITMAP. If it is
71d6a386 267 equal to WIDTH, the whole store is unused. */
8dd5516b
JJ
268 int count;
269 } large;
270 } positions_needed;
6fb5fa3c
DB
271
272 /* The next store info for this insn. */
273 struct store_info *next;
274
275 /* The right hand side of the store. This is used if there is a
 276 subsequent reload of the mem's address somewhere later in the
277 basic block. */
8dd5516b
JJ
278 rtx rhs;
279
280 /* If rhs is or holds a constant, this contains that constant,
281 otherwise NULL. */
282 rtx const_rhs;
283
284 /* Set if this store stores the same constant value as REDUNDANT_REASON
285 insn stored. These aren't eliminated early, because doing that
 286 might prevent the earlier larger store from being eliminated. */
da6603c6 287 struct insn_info_type *redundant_reason;
6fb5fa3c
DB
288};
289
4fe663b0
L
290/* Return a bitmask with the first N low bits set. */
291
292static unsigned HOST_WIDE_INT
293lowpart_bitmask (int n)
294{
dd4786fe 295 unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
4fe663b0
L
296 return mask >> (HOST_BITS_PER_WIDE_INT - n);
297}
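
/* Editor's addition -- a worked example (illustrative only) of how
   lowpart_bitmask and positions_needed.small_bitmask track which bytes of
   a store are still needed; see set_position_unneeded further below.  */
#if 0
  /* Hypothetical values for a 4-byte store.  */
  unsigned HOST_WIDE_INT mask = lowpart_bitmask (4);	/* 0xf */
  mask &= ~(HOST_WIDE_INT_1U << 1);	/* byte 1 overwritten by a later store */
  mask &= ~(HOST_WIDE_INT_1U << 2);	/* byte 2 overwritten as well */
  /* mask == 0x9: bytes 0 and 3 are still needed, so the store survives;
     only when the mask reaches 0 can the whole store be deleted.  */
#endif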
298
fcb87c50 299static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool");
da6603c6 300
fcb87c50 301static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool");
6fb5fa3c
DB
302
303/* This structure holds information about a load. These are only
304 built for rtx bases. */
da6603c6 305struct read_info_type
6fb5fa3c
DB
306{
307 /* The id of the mem group of the base address. */
308 int group_id;
309
71d6a386
RS
310 /* The offset of the first byte associated with the operation. */
311 HOST_WIDE_INT offset;
312
313 /* The number of bytes covered by the operation, or -1 if not known. */
314 HOST_WIDE_INT width;
6fb5fa3c
DB
315
316 /* The mem being read. */
317 rtx mem;
318
319 /* The next read_info for this insn. */
da6603c6 320 struct read_info_type *next;
6fb5fa3c 321};
da6603c6 322typedef struct read_info_type *read_info_t;
6fb5fa3c 323
fcb87c50 324static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
6fb5fa3c
DB
325
326/* One of these records is created for each insn. */
327
da6603c6 328struct insn_info_type
6fb5fa3c
DB
329{
330 /* Set true if the insn contains a store but the insn itself cannot
331 be deleted. This is set if the insn is a parallel and there is
 332 more than one non-dead output or if the insn is in some way
333 volatile. */
334 bool cannot_delete;
335
336 /* This field is only used by the global algorithm. It is set true
337 if the insn contains any read of mem except for a (1). This is
338 also set if the insn is a call or has a clobber mem. If the insn
339 contains a wild read, the use_rec will be null. */
340 bool wild_read;
341
d26c7090
ER
342 /* This is true only for CALL instructions which could potentially read
343 any non-frame memory location. This field is used by the global
344 algorithm. */
345 bool non_frame_wild_read;
346
50f0f366
EB
347 /* This field is only used for the processing of const functions.
348 These functions cannot read memory, but they can read the stack
64520bdc
EB
349 because that is where they may get their parms. We need to be
350 this conservative because, like the store motion pass, we don't
351 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
352 Moreover, we need to distinguish two cases:
353 1. Before reload (register elimination), the stores related to
354 outgoing arguments are stack pointer based and thus deemed
355 of non-constant base in this pass. This requires special
356 handling but also means that the frame pointer based stores
357 need not be killed upon encountering a const function call.
358 2. After reload, the stores related to outgoing arguments can be
359 either stack pointer or hard frame pointer based. This means
360 that we have no other choice than also killing all the frame
361 pointer based stores upon encountering a const function call.
57534689
JJ
362 This field is set after reload for const function calls and before
363 reload for const tail function calls on targets where arg pointer
364 is the frame pointer. Having this set is less severe than a wild
365 read, it just means that all the frame related stores are killed
366 rather than all the stores. */
64520bdc
EB
367 bool frame_read;
368
369 /* This field is only used for the processing of const functions.
370 It is set if the insn may contain a stack pointer based store. */
50f0f366 371 bool stack_pointer_based;
6fb5fa3c
DB
372
373 /* This is true if any of the sets within the store contains a
374 cselib base. Such stores can only be deleted by the local
375 algorithm. */
376 bool contains_cselib_groups;
377
378 /* The insn. */
dd60a84c 379 rtx_insn *insn;
6fb5fa3c
DB
380
381 /* The list of mem sets or mem clobbers that are contained in this
382 insn. If the insn is deletable, it contains only one mem set.
383 But it could also contain clobbers. Insns that contain more than
 384 one mem set are not deletable, but each of those mems is here in
6ed3da00 385 order to provide info to delete other insns. */
fd6daec9 386 store_info *store_rec;
6fb5fa3c
DB
387
388 /* The linked list of mem uses in this insn. Only the reads from
389 rtx bases are listed here. The reads to cselib bases are
390 completely processed during the first scan and so are never
391 created. */
392 read_info_t read_rec;
393
9e582b1d
JR
394 /* The live fixed registers. We assume only fixed registers can
395 cause trouble by being clobbered from an expanded pattern;
396 storing only the live fixed registers (rather than all registers)
397 means less memory needs to be allocated / copied for the individual
398 stores. */
399 regset fixed_regs_live;
400
6fb5fa3c 401 /* The prev insn in the basic block. */
da6603c6 402 struct insn_info_type * prev_insn;
6fb5fa3c
DB
403
404 /* The linked list of insns that are in consideration for removal in
073a8998 405 the forwards pass through the basic block. This pointer may be
6fb5fa3c 406 trash as it is not cleared when a wild read occurs. The only
fa10beec 407 time it is guaranteed to be correct is when the traversal starts
6fb5fa3c 408 at active_local_stores. */
da6603c6 409 struct insn_info_type * next_local_store;
6fb5fa3c 410};
da6603c6 411typedef struct insn_info_type *insn_info_t;
6fb5fa3c 412
fcb87c50 413static object_allocator<insn_info_type> insn_info_type_pool ("insn_info_pool");
6fb5fa3c
DB
414
415/* The linked list of stores that are under consideration in this
b8698a0f 416 basic block. */
6fb5fa3c 417static insn_info_t active_local_stores;
dabd47e7 418static int active_local_stores_len;
6fb5fa3c 419
da6603c6 420struct dse_bb_info_type
6fb5fa3c 421{
6fb5fa3c
DB
422 /* Pointer to the insn info for the last insn in the block. These
423 are linked so this is how all of the insns are reached. During
424 scanning this is the current insn being scanned. */
425 insn_info_t last_insn;
426
427 /* The info for the global dataflow problem. */
428
429
 430 /* This is set if the transfer function should AND in the wild_read
431 bitmap before applying the kill and gen sets. That vector knocks
432 out most of the bits in the bitmap and thus speeds up the
433 operations. */
434 bool apply_wild_read;
435
02b47899
KZ
436 /* The following 4 bitvectors hold information about which positions
437 of which stores are live or dead. They are indexed by
438 get_bitmap_index. */
439
6fb5fa3c
DB
440 /* The set of store positions that exist in this block before a wild read. */
441 bitmap gen;
b8698a0f 442
6fb5fa3c
DB
443 /* The set of load positions that exist in this block above the
444 same position of a store. */
445 bitmap kill;
446
447 /* The set of stores that reach the top of the block without being
448 killed by a read.
449
450 Do not represent the in if it is all ones. Note that this is
451 what the bitvector should logically be initialized to for a set
452 intersection problem. However, like the kill set, this is too
453 expensive. So initially, the in set will only be created for the
454 exit block and any block that contains a wild read. */
455 bitmap in;
456
 457 /* The set of stores that reach the bottom of the block from its
458 successors.
459
 460 Do not represent the out if it is all ones. Note that this is
461 what the bitvector should logically be initialized to for a set
462 intersection problem. However, like the kill and in set, this is
463 too expensive. So what is done is that the confluence operator
464 just initializes the vector from one of the out sets of the
465 successors of the block. */
466 bitmap out;
02b47899
KZ
467
468 /* The following bitvector is indexed by the reg number. It
469 contains the set of regs that are live at the current instruction
470 being processed. While it contains info for all of the
9e582b1d
JR
471 registers, only the hard registers are actually examined. It is used
472 to assure that shift and/or add sequences that are inserted do not
073a8998 473 accidentally clobber live hard regs. */
02b47899 474 bitmap regs_live;
6fb5fa3c
DB
475};
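
/* Editor's note -- an illustrative summary, not an authoritative
   specification: with the bitmaps above, the per-block backward transfer
   function computes roughly

       IN = GEN U (OUT - KILL)

   i.e. a store position survives to the top of the block if the block
   itself generates it (GEN), or if it reaches the bottom of the block
   from the successors (OUT) and is not killed by a read inside the
   block (KILL).  OUT is seeded from the successors' IN sets by the
   confluence functions, which is how the pass avoids materializing the
   all-ones vectors mentioned above.  */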
476
da6603c6 477typedef struct dse_bb_info_type *bb_info_t;
fb0b2914
ML
478
479static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
fcb87c50 480 ("bb_info_pool");
6fb5fa3c
DB
481
482/* Table to hold all bb_infos. */
483static bb_info_t *bb_table;
484
485/* There is a group_info for each rtx base that is used to reference
486 memory. There are also not many of the rtx bases because they are
487 very limited in scope. */
488
b8698a0f 489struct group_info
6fb5fa3c
DB
490{
491 /* The actual base of the address. */
492 rtx rtx_base;
493
494 /* The sequential id of the base. This allows us to have a
495 canonical ordering of these that is not based on addresses. */
496 int id;
497
8f5929e1
JJ
498 /* True if there are any positions that are to be processed
499 globally. */
500 bool process_globally;
501
502 /* True if the base of this group is either the frame_pointer or
503 hard_frame_pointer. */
504 bool frame_related;
505
d32f725a
OH
506 /* A mem wrapped around the base pointer for the group in order to do
507 read dependency. It must be given BLKmode in order to encompass all
508 the possible offsets from the base. */
6fb5fa3c 509 rtx base_mem;
b8698a0f 510
6216f94e
JJ
511 /* Canonized version of base_mem's address. */
512 rtx canon_base_addr;
6fb5fa3c
DB
513
514 /* These two sets of two bitmaps are used to keep track of how many
6ed3da00 515 stores are actually referencing that position from this base. We
6fb5fa3c 516 only do this for rtx bases as this will be used to assign
6ed3da00 517 positions in the bitmaps for the global problem. Bit N is set in
6fb5fa3c
DB
518 store1 on the first store for offset N. Bit N is set in store2
519 for the second store to offset N. This is all we need since we
520 only care about offsets that have two or more stores for them.
521
522 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
523 for 0 and greater offsets.
524
525 There is one special case here, for stores into the stack frame,
526 we will or store1 into store2 before deciding which stores look
527 at globally. This is because stores to the stack frame that have
528 no other reads before the end of the function can also be
529 deleted. */
530 bitmap store1_n, store1_p, store2_n, store2_p;
531
d26c7090
ER
 532 /* These bitmaps keep track of which offsets in this group escape this function.
533 An offset escapes if it corresponds to a named variable whose
534 addressable flag is set. */
535 bitmap escaped_n, escaped_p;
536
6ed3da00 537 /* The positions in this bitmap have the same assignments as the in,
6fb5fa3c 538 out, gen and kill bitmaps. This bitmap is all zeros except for
6ed3da00 539 the positions that are occupied by stores for this group. */
6fb5fa3c
DB
540 bitmap group_kill;
541
6fb5fa3c 542 /* The offset_map is used to map the offsets from this base into
6ed3da00 543 positions in the global bitmaps. It is only created after all of
6fb5fa3c
DB
 544 the stores have been scanned and we know which ones we
545 care about. */
b8698a0f
L
546 int *offset_map_n, *offset_map_p;
547 int offset_map_size_n, offset_map_size_p;
6fb5fa3c 548};
da6603c6 549
fcb87c50 550static object_allocator<group_info> group_info_pool ("rtx_group_info_pool");
6fb5fa3c 551
6fb5fa3c
DB
552/* Index into the rtx_group_vec. */
553static int rtx_group_next_id;
554
6fb5fa3c 555
fd6daec9 556static vec<group_info *> rtx_group_vec;
6fb5fa3c
DB
557
558
559/* This structure holds the set of changes that are being deferred
 560 when removing a read operation. See replace_read. */
b8698a0f 561struct deferred_change
6fb5fa3c
DB
562{
563
564 /* The mem that is being replaced. */
565 rtx *loc;
566
567 /* The reg it is being replaced with. */
568 rtx reg;
569
570 struct deferred_change *next;
571};
572
fb0b2914 573static object_allocator<deferred_change> deferred_change_pool
fcb87c50 574 ("deferred_change_pool");
6fb5fa3c 575
fd6daec9 576static deferred_change *deferred_change_list = NULL;
6fb5fa3c 577
e3b5732b 578/* This is true except if cfun->stdarg -- i.e. we cannot do
9dd9bf80 579 this for vararg functions because they play games with the frame. */
6fb5fa3c
DB
580static bool stores_off_frame_dead_at_return;
581
582/* Counter for stats. */
b8698a0f
L
583static int globally_deleted;
584static int locally_deleted;
b8698a0f 585
6fb5fa3c
DB
586static bitmap all_blocks;
587
d26c7090
ER
588/* Locations that are killed by calls in the global phase. */
589static bitmap kill_on_calls;
590
6fb5fa3c
DB
591/* The number of bits used in the global bitmaps. */
592static unsigned int current_position;
71d6a386
RS
593
594/* Print offset range [OFFSET, OFFSET + WIDTH) to FILE. */
595
596static void
597print_range (FILE *file, poly_int64 offset, poly_int64 width)
598{
599 fprintf (file, "[");
600 print_dec (offset, file, SIGNED);
601 fprintf (file, "..");
602 print_dec (offset + width, file, SIGNED);
603 fprintf (file, ")");
604}
6fb5fa3c
DB
605\f
606/*----------------------------------------------------------------------------
607 Zeroth step.
608
b8698a0f 609 Initialization.
6fb5fa3c
DB
610----------------------------------------------------------------------------*/
611
6fb5fa3c 612
6fb5fa3c
DB
613/* Hashtable callbacks for maintaining the "bases" field of
614 store_group_info, given that the addresses are function invariants. */
615
8d67ee55 616struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
703c8606 617{
67f58944
TS
618 static inline hashval_t hash (const group_info *);
619 static inline bool equal (const group_info *, const group_info *);
703c8606
LC
620};
621
622inline bool
67f58944
TS
623invariant_group_base_hasher::equal (const group_info *gi1,
624 const group_info *gi2)
6fb5fa3c 625{
6fb5fa3c
DB
626 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
627}
628
703c8606 629inline hashval_t
67f58944 630invariant_group_base_hasher::hash (const group_info *gi)
6fb5fa3c 631{
6fb5fa3c
DB
632 int do_not_record;
633 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
634}
635
703c8606 636/* Tables of group_info structures, hashed by base value. */
c203e8a7 637static hash_table<invariant_group_base_hasher> *rtx_group_table;
703c8606 638
6fb5fa3c
DB
639
640/* Get the GROUP for BASE. Add a new group if it is not there. */
641
fd6daec9 642static group_info *
6fb5fa3c
DB
643get_group_info (rtx base)
644{
b8698a0f 645 struct group_info tmp_gi;
fd6daec9 646 group_info *gi;
703c8606 647 group_info **slot;
6fb5fa3c 648
ac6929b5
RB
649 gcc_assert (base != NULL_RTX);
650
651 /* Find the store_base_info structure for BASE, creating a new one
652 if necessary. */
653 tmp_gi.rtx_base = base;
654 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
655 gi = *slot;
6fb5fa3c
DB
656
657 if (gi == NULL)
658 {
fb0b2914 659 *slot = gi = group_info_pool.allocate ();
6fb5fa3c
DB
660 gi->rtx_base = base;
661 gi->id = rtx_group_next_id++;
d32f725a 662 gi->base_mem = gen_rtx_MEM (BLKmode, base);
6216f94e 663 gi->canon_base_addr = canon_rtx (base);
3f9b14ff
SB
664 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
665 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
666 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
667 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
668 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
669 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
670 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c 671 gi->process_globally = false;
b8698a0f 672 gi->frame_related =
6fb5fa3c
DB
673 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
674 gi->offset_map_size_n = 0;
675 gi->offset_map_size_p = 0;
676 gi->offset_map_n = NULL;
677 gi->offset_map_p = NULL;
9771b263 678 rtx_group_vec.safe_push (gi);
6fb5fa3c
DB
679 }
680
681 return gi;
682}
683
684
685/* Initialization of data structures. */
686
687static void
688dse_step0 (void)
689{
690 locally_deleted = 0;
691 globally_deleted = 0;
6fb5fa3c 692
3f9b14ff
SB
693 bitmap_obstack_initialize (&dse_bitmap_obstack);
694 gcc_obstack_init (&dse_obstack);
695
696 scratch = BITMAP_ALLOC (&reg_obstack);
697 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c 698
6fb5fa3c 699
c203e8a7 700 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
6fb5fa3c 701
8b1c6fd7 702 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
6fb5fa3c
DB
703 rtx_group_next_id = 0;
704
e3b5732b 705 stores_off_frame_dead_at_return = !cfun->stdarg;
6fb5fa3c
DB
706
707 init_alias_analysis ();
6fb5fa3c
DB
708}
709
710
711\f
712/*----------------------------------------------------------------------------
713 First step.
714
715 Scan all of the insns. Any random ordering of the blocks is fine.
0d52bcc1 716 Each block is scanned in forward order to accommodate cselib which
6fb5fa3c
DB
717 is used to remove stores with non-constant bases.
718----------------------------------------------------------------------------*/
719
720/* Delete all of the store_info recs from INSN_INFO. */
721
b8698a0f 722static void
6fb5fa3c
DB
723free_store_info (insn_info_t insn_info)
724{
fd6daec9
TS
725 store_info *cur = insn_info->store_rec;
726 while (cur)
6fb5fa3c 727 {
fd6daec9
TS
728 store_info *next = cur->next;
729 if (cur->is_large)
730 BITMAP_FREE (cur->positions_needed.large.bmap);
731 if (cur->cse_base)
732 cse_store_info_pool.remove (cur);
6fb5fa3c 733 else
fd6daec9
TS
734 rtx_store_info_pool.remove (cur);
735 cur = next;
6fb5fa3c
DB
736 }
737
738 insn_info->cannot_delete = true;
739 insn_info->contains_cselib_groups = false;
740 insn_info->store_rec = NULL;
741}
742
a79683d5 743struct note_add_store_info
9e582b1d 744{
dc01c3d1 745 rtx_insn *first, *current;
9e582b1d
JR
746 regset fixed_regs_live;
747 bool failure;
a79683d5 748};
9e582b1d
JR
749
750/* Callback for emit_inc_dec_insn_before via note_stores.
751 Check if a register is clobbered which is live afterwards. */
752
753static void
754note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
755{
dc01c3d1 756 rtx_insn *insn;
9e582b1d 757 note_add_store_info *info = (note_add_store_info *) data;
9e582b1d
JR
758
759 if (!REG_P (loc))
760 return;
761
762 /* If this register is referenced by the current or an earlier insn,
763 that's OK. E.g. this applies to the register that is being incremented
764 with this addition. */
765 for (insn = info->first;
766 insn != NEXT_INSN (info->current);
767 insn = NEXT_INSN (insn))
768 if (reg_referenced_p (loc, PATTERN (insn)))
769 return;
770
771 /* If we come here, we have a clobber of a register that's only OK
772 if that register is not live. If we don't have liveness information
773 available, fail now. */
774 if (!info->fixed_regs_live)
775 {
53d1bae9 776 info->failure = true;
9e582b1d
JR
777 return;
778 }
779 /* Now check if this is a live fixed register. */
53d1bae9
RS
780 unsigned int end_regno = END_REGNO (loc);
781 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
782 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
783 info->failure = true;
9e582b1d
JR
784}
785
4deef538
AO
786/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
787 SRC + SRCOFF before insn ARG. */
6fb5fa3c
DB
788
789static int
4deef538
AO
790emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
791 rtx op ATTRIBUTE_UNUSED,
792 rtx dest, rtx src, rtx srcoff, void *arg)
6fb5fa3c 793{
9e582b1d 794 insn_info_t insn_info = (insn_info_t) arg;
dc01c3d1 795 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
9e582b1d 796 note_add_store_info info;
b8698a0f 797
4deef538
AO
798 /* We can reuse all operands without copying, because we are about
799 to delete the insn that contained it. */
9e582b1d 800 if (srcoff)
ed079c4b
JJ
801 {
802 start_sequence ();
803 emit_insn (gen_add3_insn (dest, src, srcoff));
804 new_insn = get_insns ();
805 end_sequence ();
806 }
9e582b1d 807 else
1476d1bd 808 new_insn = gen_move_insn (dest, src);
9e582b1d
JR
809 info.first = new_insn;
810 info.fixed_regs_live = insn_info->fixed_regs_live;
811 info.failure = false;
812 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
813 {
814 info.current = cur;
815 note_stores (PATTERN (cur), note_add_store, &info);
816 }
6fb5fa3c 817
9e582b1d
JR
818 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
819 return it immediately, communicating the failure to its caller. */
820 if (info.failure)
821 return 1;
822
823 emit_insn_before (new_insn, insn);
6fb5fa3c 824
8d8e205b 825 return 0;
6fb5fa3c
DB
826}
827
9e582b1d
JR
828/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
829 is there, is split into a separate insn.
830 Return true on success (or if there was nothing to do), false on failure. */
6fb5fa3c 831
9e582b1d
JR
832static bool
833check_for_inc_dec_1 (insn_info_t insn_info)
6fb5fa3c 834{
dd60a84c 835 rtx_insn *insn = insn_info->insn;
6fb5fa3c
DB
836 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
837 if (note)
8d8e205b
RS
838 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
839 insn_info) == 0;
9e582b1d 840 return true;
6fb5fa3c
DB
841}
842
843
9e582b1d
JR
844/* Entry point for postreload. If you work on reload_cse, or you need this
845 anywhere else, consider if you can provide register liveness information
846 and add a parameter to this function so that it can be passed down in
847 insn_info.fixed_regs_live. */
848bool
dd60a84c 849check_for_inc_dec (rtx_insn *insn)
9e582b1d 850{
da6603c6 851 insn_info_type insn_info;
9e582b1d
JR
852 rtx note;
853
854 insn_info.insn = insn;
855 insn_info.fixed_regs_live = NULL;
856 note = find_reg_note (insn, REG_INC, NULL_RTX);
857 if (note)
8d8e205b
RS
858 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
859 &insn_info) == 0;
9e582b1d
JR
860 return true;
861}
862
b8698a0f 863/* Delete the insn and free all of the fields inside INSN_INFO. */
6fb5fa3c
DB
864
865static void
866delete_dead_store_insn (insn_info_t insn_info)
867{
868 read_info_t read_info;
869
870 if (!dbg_cnt (dse))
871 return;
872
9e582b1d
JR
873 if (!check_for_inc_dec_1 (insn_info))
874 return;
456610d3 875 if (dump_file && (dump_flags & TDF_DETAILS))
ac6929b5
RB
876 fprintf (dump_file, "Locally deleting insn %d\n",
877 INSN_UID (insn_info->insn));
6fb5fa3c
DB
878
879 free_store_info (insn_info);
880 read_info = insn_info->read_rec;
b8698a0f 881
6fb5fa3c
DB
882 while (read_info)
883 {
884 read_info_t next = read_info->next;
fb0b2914 885 read_info_type_pool.remove (read_info);
6fb5fa3c
DB
886 read_info = next;
887 }
888 insn_info->read_rec = NULL;
889
890 delete_insn (insn_info->insn);
891 locally_deleted++;
892 insn_info->insn = NULL;
893
894 insn_info->wild_read = false;
895}
896
88d8330d
EB
897/* Return whether DECL, a local variable, can possibly escape the current
898 function scope. */
899
900static bool
901local_variable_can_escape (tree decl)
902{
903 if (TREE_ADDRESSABLE (decl))
904 return true;
905
906 /* If this is a partitioned variable, we need to consider all the variables
907 in the partition. This is necessary because a store into one of them can
908 be replaced with a store into another and this may not change the outcome
909 of the escape analysis. */
910 if (cfun->gimple_df->decls_to_pointers != NULL)
911 {
39c8aaa4 912 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
88d8330d 913 if (namep)
39c8aaa4 914 return TREE_ADDRESSABLE (*namep);
88d8330d
EB
915 }
916
917 return false;
918}
919
920/* Return whether EXPR can possibly escape the current function scope. */
921
d26c7090
ER
922static bool
923can_escape (tree expr)
924{
925 tree base;
926 if (!expr)
927 return true;
928 base = get_base_address (expr);
929 if (DECL_P (base)
88d8330d 930 && !may_be_aliased (base)
8813a647 931 && !(VAR_P (base)
88d8330d
EB
932 && !DECL_EXTERNAL (base)
933 && !TREE_STATIC (base)
934 && local_variable_can_escape (base)))
d26c7090
ER
935 return false;
936 return true;
937}
6fb5fa3c
DB
938
939/* Set the store* bitmaps offset_map_size* fields in GROUP based on
940 OFFSET and WIDTH. */
941
942static void
fd6daec9 943set_usage_bits (group_info *group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
d26c7090 944 tree expr)
6fb5fa3c
DB
945{
946 HOST_WIDE_INT i;
d26c7090 947 bool expr_escapes = can_escape (expr);
8dd5516b 948 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
6fb5fa3c
DB
949 for (i=offset; i<offset+width; i++)
950 {
951 bitmap store1;
952 bitmap store2;
d26c7090 953 bitmap escaped;
6fb5fa3c
DB
954 int ai;
955 if (i < 0)
956 {
957 store1 = group->store1_n;
958 store2 = group->store2_n;
d26c7090 959 escaped = group->escaped_n;
6fb5fa3c
DB
960 ai = -i;
961 }
962 else
963 {
964 store1 = group->store1_p;
965 store2 = group->store2_p;
d26c7090 966 escaped = group->escaped_p;
6fb5fa3c
DB
967 ai = i;
968 }
b8698a0f 969
fcaa4ca4 970 if (!bitmap_set_bit (store1, ai))
6fb5fa3c 971 bitmap_set_bit (store2, ai);
b8698a0f 972 else
6fb5fa3c 973 {
6fb5fa3c
DB
974 if (i < 0)
975 {
976 if (group->offset_map_size_n < ai)
977 group->offset_map_size_n = ai;
978 }
979 else
980 {
981 if (group->offset_map_size_p < ai)
982 group->offset_map_size_p = ai;
983 }
984 }
d26c7090
ER
985 if (expr_escapes)
986 bitmap_set_bit (escaped, ai);
6fb5fa3c
DB
987 }
988}
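
/* Editor's addition -- a worked example, illustrative only: a 4-byte
   store at offset -8 from its base reaches set_usage_bits with
   offset == -8 and width == 4, so i runs over -8..-5 and ai == -i marks
   positions 5..8 of the "_n" bitmaps.  The first store to a position sets
   the bit in store1_n (and updates offset_map_size_n); a second store to
   the same position also sets it in store2_n, which is what the later
   analysis consults when deciding whether the global problem is worth
   setting up for this group.  */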
989
d26c7090
ER
990static void
991reset_active_stores (void)
992{
993 active_local_stores = NULL;
994 active_local_stores_len = 0;
995}
6fb5fa3c 996
d26c7090 997/* Free all READ_REC of the LAST_INSN of BB_INFO. */
6fb5fa3c
DB
998
999static void
d26c7090 1000free_read_records (bb_info_t bb_info)
6fb5fa3c
DB
1001{
1002 insn_info_t insn_info = bb_info->last_insn;
1003 read_info_t *ptr = &insn_info->read_rec;
6fb5fa3c
DB
1004 while (*ptr)
1005 {
1006 read_info_t next = (*ptr)->next;
ac6929b5
RB
1007 read_info_type_pool.remove (*ptr);
1008 *ptr = next;
6fb5fa3c 1009 }
d26c7090
ER
1010}
1011
1012/* Set the BB_INFO so that the last insn is marked as a wild read. */
1013
1014static void
1015add_wild_read (bb_info_t bb_info)
1016{
1017 insn_info_t insn_info = bb_info->last_insn;
6fb5fa3c 1018 insn_info->wild_read = true;
d26c7090
ER
1019 free_read_records (bb_info);
1020 reset_active_stores ();
6fb5fa3c
DB
1021}
1022
d26c7090
ER
1023/* Set the BB_INFO so that the last insn is marked as a wild read of
1024 non-frame locations. */
1025
1026static void
1027add_non_frame_wild_read (bb_info_t bb_info)
1028{
1029 insn_info_t insn_info = bb_info->last_insn;
1030 insn_info->non_frame_wild_read = true;
1031 free_read_records (bb_info);
1032 reset_active_stores ();
1033}
6fb5fa3c 1034
50f0f366
EB
1035/* Return true if X is a constant or one of the registers that behave
1036 as a constant over the life of a function. This is equivalent to
1037 !rtx_varies_p for memory addresses. */
6fb5fa3c
DB
1038
1039static bool
1040const_or_frame_p (rtx x)
1041{
d8116890
KZ
1042 if (CONSTANT_P (x))
1043 return true;
1044
1045 if (GET_CODE (x) == REG)
6fb5fa3c 1046 {
6fb5fa3c
DB
1047 /* Note that we have to test for the actual rtx used for the frame
1048 and arg pointers and not just the register number in case we have
1049 eliminated the frame and/or arg pointer and are using it
1050 for pseudos. */
1051 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1052 /* The arg pointer varies if it is not a fixed register. */
1053 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1054 || x == pic_offset_table_rtx)
1055 return true;
1056 return false;
6fb5fa3c 1057 }
da6603c6 1058
d8116890 1059 return false;
6fb5fa3c
DB
1060}
1061
b8698a0f
L
1062/* Take all reasonable action to put the address of MEM into the form
1063 that we can do analysis on.
6fb5fa3c
DB
1064
1065 The gold standard is to get the address into the form: address +
1066 OFFSET where address is something that rtx_varies_p considers a
1067 constant. When we can get the address in this form, we can do
1068 global analysis on it. Note that for constant bases, address is
1069 not actually returned, only the group_id. The address can be
1070 obtained from that.
1071
1072 If that fails, we try cselib to get a value we can at least use
b8698a0f
L
1073 locally. If that fails we return false.
1074
6fb5fa3c
DB
1075 The GROUP_ID is set to -1 for cselib bases and the index of the
 1076 group for non_varying bases. */
1079
1080static bool
1081canon_address (rtx mem,
6fb5fa3c 1082 int *group_id,
b8698a0f 1083 HOST_WIDE_INT *offset,
6fb5fa3c
DB
1084 cselib_val **base)
1085{
ef4bddc2 1086 machine_mode address_mode = get_address_mode (mem);
6fb5fa3c
DB
1087 rtx mem_address = XEXP (mem, 0);
1088 rtx expanded_address, address;
403c7520
JJ
1089 int expanded;
1090
4deef538 1091 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
6fb5fa3c 1092
456610d3 1093 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
1094 {
1095 fprintf (dump_file, " mem: ");
1096 print_inline_rtx (dump_file, mem_address, 0);
1097 fprintf (dump_file, "\n");
1098 }
1099
403c7520
JJ
1100 /* First see if just canon_rtx (mem_address) is const or frame,
1101 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1102 address = NULL_RTX;
1103 for (expanded = 0; expanded < 2; expanded++)
1104 {
1105 if (expanded)
1106 {
1107 /* Use cselib to replace all of the reg references with the full
b8698a0f 1108 expression. This will take care of the case where we have
6fb5fa3c 1109
403c7520
JJ
1110 r_x = base + offset;
1111 val = *r_x;
b8698a0f
L
1112
1113 by making it into
6fb5fa3c 1114
403c7520 1115 val = *(base + offset); */
6fb5fa3c 1116
403c7520
JJ
1117 expanded_address = cselib_expand_value_rtx (mem_address,
1118 scratch, 5);
6fb5fa3c 1119
403c7520
JJ
1120 /* If this fails, just go with the address from first
1121 iteration. */
1122 if (!expanded_address)
1123 break;
1124 }
1125 else
1126 expanded_address = mem_address;
6fb5fa3c 1127
403c7520
JJ
1128 /* Split the address into canonical BASE + OFFSET terms. */
1129 address = canon_rtx (expanded_address);
6fb5fa3c 1130
403c7520 1131 *offset = 0;
6fb5fa3c 1132
456610d3 1133 if (dump_file && (dump_flags & TDF_DETAILS))
403c7520
JJ
1134 {
1135 if (expanded)
1136 {
1137 fprintf (dump_file, "\n after cselib_expand address: ");
1138 print_inline_rtx (dump_file, expanded_address, 0);
1139 fprintf (dump_file, "\n");
1140 }
6fb5fa3c 1141
403c7520
JJ
1142 fprintf (dump_file, "\n after canon_rtx address: ");
1143 print_inline_rtx (dump_file, address, 0);
1144 fprintf (dump_file, "\n");
1145 }
6fb5fa3c 1146
403c7520
JJ
1147 if (GET_CODE (address) == CONST)
1148 address = XEXP (address, 0);
6fb5fa3c 1149
403c7520
JJ
1150 if (GET_CODE (address) == PLUS
1151 && CONST_INT_P (XEXP (address, 1)))
1152 {
1153 *offset = INTVAL (XEXP (address, 1));
1154 address = XEXP (address, 0);
1155 }
6fb5fa3c 1156
09e881c9
BE
1157 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1158 && const_or_frame_p (address))
6fb5fa3c 1159 {
fd6daec9 1160 group_info *group = get_group_info (address);
403c7520 1161
456610d3 1162 if (dump_file && (dump_flags & TDF_DETAILS))
403c7520
JJ
1163 fprintf (dump_file, " gid=%d offset=%d \n",
1164 group->id, (int)*offset);
1165 *base = NULL;
1166 *group_id = group->id;
1167 return true;
6fb5fa3c 1168 }
403c7520
JJ
1169 }
1170
4deef538 1171 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
403c7520
JJ
1172 *group_id = -1;
1173
1174 if (*base == NULL)
1175 {
456610d3 1176 if (dump_file && (dump_flags & TDF_DETAILS))
403c7520
JJ
1177 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1178 return false;
6fb5fa3c 1179 }
456610d3 1180 if (dump_file && (dump_flags & TDF_DETAILS))
5440c0e7
AO
1181 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1182 (*base)->uid, (*base)->hash, (int)*offset);
6fb5fa3c
DB
1183 return true;
1184}
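
/* Editor's addition -- a hypothetical example, for illustration only:
   given a store to (mem:SI (plus:DI (reg:DI fp) (const_int 8))),
   canon_address splits the canonical address into the frame-pointer base
   and *offset == 8; because the base is const_or_frame_p, it returns the
   group from get_group_info, sets *base to NULL and *group_id to the
   group's id.  For an address that cannot be put into that form, it falls
   back to cselib: *base receives the cselib value and *group_id is set
   to -1.  */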
1185
1186
1187/* Clear the rhs field from the active_local_stores array. */
1188
1189static void
1190clear_rhs_from_active_local_stores (void)
1191{
1192 insn_info_t ptr = active_local_stores;
1193
1194 while (ptr)
1195 {
fd6daec9 1196 store_info *store_info = ptr->store_rec;
6fb5fa3c
DB
1197 /* Skip the clobbers. */
1198 while (!store_info->is_set)
1199 store_info = store_info->next;
1200
1201 store_info->rhs = NULL;
8dd5516b 1202 store_info->const_rhs = NULL;
6fb5fa3c
DB
1203
1204 ptr = ptr->next_local_store;
1205 }
1206}
1207
1208
8dd5516b
JJ
1209/* Mark the byte POS bytes from the beginning of store S_INFO as unneeded. */
1210
1211static inline void
fd6daec9 1212set_position_unneeded (store_info *s_info, int pos)
8dd5516b
JJ
1213{
1214 if (__builtin_expect (s_info->is_large, false))
1215 {
fcaa4ca4
NF
1216 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1217 s_info->positions_needed.large.count++;
8dd5516b
JJ
1218 }
1219 else
1220 s_info->positions_needed.small_bitmask
fecfbfa4 1221 &= ~(HOST_WIDE_INT_1U << pos);
8dd5516b
JJ
1222}
1223
1224/* Mark the whole store S_INFO as unneeded. */
1225
1226static inline void
fd6daec9 1227set_all_positions_unneeded (store_info *s_info)
8dd5516b
JJ
1228{
1229 if (__builtin_expect (s_info->is_large, false))
1230 {
71d6a386
RS
1231 bitmap_set_range (s_info->positions_needed.large.bmap,
1232 0, s_info->width);
1233 s_info->positions_needed.large.count = s_info->width;
8dd5516b
JJ
1234 }
1235 else
07e96250 1236 s_info->positions_needed.small_bitmask = HOST_WIDE_INT_0U;
8dd5516b
JJ
1237}
1238
1239/* Return TRUE if any bytes from S_INFO store are needed. */
1240
1241static inline bool
fd6daec9 1242any_positions_needed_p (store_info *s_info)
8dd5516b
JJ
1243{
1244 if (__builtin_expect (s_info->is_large, false))
71d6a386 1245 return s_info->positions_needed.large.count < s_info->width;
8dd5516b 1246 else
07e96250 1247 return (s_info->positions_needed.small_bitmask != HOST_WIDE_INT_0U);
8dd5516b
JJ
1248}
1249
1250/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1251 store are needed. */
1252
1253static inline bool
fd6daec9 1254all_positions_needed_p (store_info *s_info, int start, int width)
8dd5516b
JJ
1255{
1256 if (__builtin_expect (s_info->is_large, false))
1257 {
1258 int end = start + width;
1259 while (start < end)
dc491a25 1260 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
8dd5516b
JJ
1261 return false;
1262 return true;
1263 }
1264 else
1265 {
1266 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1267 return (s_info->positions_needed.small_bitmask & mask) == mask;
1268 }
1269}
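
/* Editor's addition -- worked example, illustrative only:
   all_positions_needed_p (s_info, 1, 2) on a small store builds
   mask = lowpart_bitmask (2) << 1 == 0x6 and returns true only if byte
   positions 1 and 2 are both still marked as needed in small_bitmask.  */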
1270
1271
fd6daec9 1272static rtx get_stored_val (store_info *, machine_mode, HOST_WIDE_INT,
8dd5516b
JJ
1273 HOST_WIDE_INT, basic_block, bool);
1274
1275
6fb5fa3c
DB
1276/* BODY is an instruction pattern that belongs to INSN. Return 1 if
1277 there is a candidate store, after adding it to the appropriate
1278 local store group if so. */
1279
1280static int
1281record_store (rtx body, bb_info_t bb_info)
1282{
6216f94e 1283 rtx mem, rhs, const_rhs, mem_addr;
6fb5fa3c
DB
1284 HOST_WIDE_INT offset = 0;
1285 HOST_WIDE_INT width = 0;
6fb5fa3c 1286 insn_info_t insn_info = bb_info->last_insn;
fd6daec9 1287 store_info *store_info = NULL;
6fb5fa3c
DB
1288 int group_id;
1289 cselib_val *base = NULL;
8dd5516b 1290 insn_info_t ptr, last, redundant_reason;
6fb5fa3c
DB
1291 bool store_is_unused;
1292
1293 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1294 return 0;
1295
8dd5516b
JJ
1296 mem = SET_DEST (body);
1297
6fb5fa3c
DB
1298 /* If this is not used, then this cannot be used to keep the insn
1299 from being deleted. On the other hand, it does provide something
1300 that can be used to prove that another store is dead. */
1301 store_is_unused
8dd5516b 1302 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
6fb5fa3c
DB
1303
1304 /* Check whether that value is a suitable memory location. */
6fb5fa3c
DB
1305 if (!MEM_P (mem))
1306 {
 1307 /* If the set or clobber is unused, then it does not affect our
1308 ability to get rid of the entire insn. */
1309 if (!store_is_unused)
1310 insn_info->cannot_delete = true;
1311 return 0;
1312 }
1313
1314 /* At this point we know mem is a mem. */
1315 if (GET_MODE (mem) == BLKmode)
1316 {
1317 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1318 {
456610d3 1319 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
1320 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1321 add_wild_read (bb_info);
1322 insn_info->cannot_delete = true;
8dd5516b 1323 return 0;
6fb5fa3c 1324 }
8dd5516b
JJ
1325 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1326 as memset (addr, 0, 36); */
f5541398
RS
1327 else if (!MEM_SIZE_KNOWN_P (mem)
1328 || MEM_SIZE (mem) <= 0
1329 || MEM_SIZE (mem) > MAX_OFFSET
8dd5516b 1330 || GET_CODE (body) != SET
8dd5516b 1331 || !CONST_INT_P (SET_SRC (body)))
6fb5fa3c 1332 {
8dd5516b
JJ
1333 if (!store_is_unused)
1334 {
 1335 /* If the set or clobber is unused, then it does not affect our
1336 ability to get rid of the entire insn. */
1337 insn_info->cannot_delete = true;
1338 clear_rhs_from_active_local_stores ();
1339 }
1340 return 0;
6fb5fa3c 1341 }
6fb5fa3c
DB
1342 }
1343
1344 /* We can still process a volatile mem, we just cannot delete it. */
1345 if (MEM_VOLATILE_P (mem))
8dd5516b 1346 insn_info->cannot_delete = true;
6fb5fa3c 1347
ac6929b5 1348 if (!canon_address (mem, &group_id, &offset, &base))
6fb5fa3c
DB
1349 {
1350 clear_rhs_from_active_local_stores ();
1351 return 0;
1352 }
1353
8dd5516b 1354 if (GET_MODE (mem) == BLKmode)
f5541398 1355 width = MEM_SIZE (mem);
8dd5516b 1356 else
6f557e0e 1357 width = GET_MODE_SIZE (GET_MODE (mem));
6fb5fa3c 1358
213ffde2
JJ
1359 if (offset > HOST_WIDE_INT_MAX - width)
1360 {
1361 clear_rhs_from_active_local_stores ();
1362 return 0;
1363 }
1364
ac6929b5 1365 if (group_id >= 0)
6fb5fa3c
DB
1366 {
1367 /* In the restrictive case where the base is a constant or the
1368 frame pointer we can do global analysis. */
b8698a0f 1369
fd6daec9 1370 group_info *group
9771b263 1371 = rtx_group_vec[group_id];
d26c7090 1372 tree expr = MEM_EXPR (mem);
b8698a0f 1373
da6603c6 1374 store_info = rtx_store_info_pool.allocate ();
d26c7090 1375 set_usage_bits (group, offset, width, expr);
6fb5fa3c 1376
456610d3 1377 if (dump_file && (dump_flags & TDF_DETAILS))
71d6a386
RS
1378 {
1379 fprintf (dump_file, " processing const base store gid=%d",
1380 group_id);
1381 print_range (dump_file, offset, width);
1382 fprintf (dump_file, "\n");
1383 }
6fb5fa3c
DB
1384 }
1385 else
1386 {
9e412ca3 1387 if (may_be_sp_based_p (XEXP (mem, 0)))
50f0f366 1388 insn_info->stack_pointer_based = true;
6fb5fa3c 1389 insn_info->contains_cselib_groups = true;
50f0f366 1390
da6603c6 1391 store_info = cse_store_info_pool.allocate ();
6fb5fa3c
DB
1392 group_id = -1;
1393
456610d3 1394 if (dump_file && (dump_flags & TDF_DETAILS))
71d6a386
RS
1395 {
1396 fprintf (dump_file, " processing cselib store ");
1397 print_range (dump_file, offset, width);
1398 fprintf (dump_file, "\n");
1399 }
6fb5fa3c
DB
1400 }
1401
8dd5516b
JJ
1402 const_rhs = rhs = NULL_RTX;
1403 if (GET_CODE (body) == SET
1404 /* No place to keep the value after ra. */
1405 && !reload_completed
1406 && (REG_P (SET_SRC (body))
1407 || GET_CODE (SET_SRC (body)) == SUBREG
1408 || CONSTANT_P (SET_SRC (body)))
1409 && !MEM_VOLATILE_P (mem)
1410 /* Sometimes the store and reload is used for truncation and
1411 rounding. */
1412 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1413 {
1414 rhs = SET_SRC (body);
1415 if (CONSTANT_P (rhs))
1416 const_rhs = rhs;
1417 else if (body == PATTERN (insn_info->insn))
1418 {
1419 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1420 if (tem && CONSTANT_P (XEXP (tem, 0)))
1421 const_rhs = XEXP (tem, 0);
1422 }
1423 if (const_rhs == NULL_RTX && REG_P (rhs))
1424 {
1425 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1426
1427 if (tem && CONSTANT_P (tem))
1428 const_rhs = tem;
1429 }
1430 }
1431
6fb5fa3c
DB
 1432 /* Check to see if this store causes some other stores to be
1433 dead. */
1434 ptr = active_local_stores;
1435 last = NULL;
8dd5516b 1436 redundant_reason = NULL;
6216f94e 1437 mem = canon_rtx (mem);
ac6929b5
RB
1438
1439 if (group_id < 0)
1440 mem_addr = base->val_rtx;
6216f94e
JJ
1441 else
1442 {
ac6929b5
RB
1443 group_info *group = rtx_group_vec[group_id];
1444 mem_addr = group->canon_base_addr;
6216f94e 1445 }
ac6929b5
RB
1446 if (offset)
1447 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
6fb5fa3c
DB
1448
1449 while (ptr)
1450 {
1451 insn_info_t next = ptr->next_local_store;
fd6daec9 1452 struct store_info *s_info = ptr->store_rec;
60564289 1453 bool del = true;
6fb5fa3c
DB
1454
1455 /* Skip the clobbers. We delete the active insn if this insn
6ed3da00 1456 shadows the set. To have been put on the active list, it
6fb5fa3c
DB
 1457 has exactly one set. */
1458 while (!s_info->is_set)
1459 s_info = s_info->next;
1460
ac6929b5 1461 if (s_info->group_id == group_id && s_info->cse_base == base)
6fb5fa3c
DB
1462 {
1463 HOST_WIDE_INT i;
456610d3 1464 if (dump_file && (dump_flags & TDF_DETAILS))
71d6a386
RS
1465 {
1466 fprintf (dump_file, " trying store in insn=%d gid=%d",
1467 INSN_UID (ptr->insn), s_info->group_id);
1468 print_range (dump_file, s_info->offset, s_info->width);
1469 fprintf (dump_file, "\n");
1470 }
8dd5516b
JJ
1471
1472 /* Even if PTR won't be eliminated as unneeded, if both
1473 PTR and this insn store the same constant value, we might
1474 eliminate this insn instead. */
1475 if (s_info->const_rhs
1476 && const_rhs
71d6a386
RS
1477 && known_subrange_p (offset, width,
1478 s_info->offset, s_info->width)
1479 && all_positions_needed_p (s_info, offset - s_info->offset,
8dd5516b
JJ
1480 width))
1481 {
1482 if (GET_MODE (mem) == BLKmode)
1483 {
1484 if (GET_MODE (s_info->mem) == BLKmode
1485 && s_info->const_rhs == const_rhs)
1486 redundant_reason = ptr;
1487 }
1488 else if (s_info->const_rhs == const0_rtx
1489 && const_rhs == const0_rtx)
1490 redundant_reason = ptr;
1491 else
1492 {
1493 rtx val;
1494 start_sequence ();
71d6a386 1495 val = get_stored_val (s_info, GET_MODE (mem), offset, width,
8dd5516b
JJ
1496 BLOCK_FOR_INSN (insn_info->insn),
1497 true);
1498 if (get_insns () != NULL)
1499 val = NULL_RTX;
1500 end_sequence ();
1501 if (val && rtx_equal_p (val, const_rhs))
1502 redundant_reason = ptr;
1503 }
1504 }
1505
71d6a386
RS
1506 if (known_subrange_p (s_info->offset, s_info->width, offset, width))
1507 /* The new store touches every byte that S_INFO does. */
1508 set_all_positions_unneeded (s_info);
1509 else
1510 {
1511 HOST_WIDE_INT begin_unneeded = offset - s_info->offset;
1512 HOST_WIDE_INT end_unneeded = begin_unneeded + width;
1513 begin_unneeded = MAX (begin_unneeded, 0);
1514 end_unneeded = MIN (end_unneeded, s_info->width);
1515 for (i = begin_unneeded; i < end_unneeded; ++i)
1516 set_position_unneeded (s_info, i);
1517 }
6fb5fa3c
DB
1518 }
1519 else if (s_info->rhs)
1520 /* Need to see if it is possible for this store to overwrite
1521 the value of store_info. If it is, set the rhs to NULL to
1522 keep it from being used to remove a load. */
1523 {
43b9f499
RB
1524 if (canon_output_dependence (s_info->mem, true,
1525 mem, GET_MODE (mem),
1526 mem_addr))
8dd5516b
JJ
1527 {
1528 s_info->rhs = NULL;
1529 s_info->const_rhs = NULL;
1530 }
6fb5fa3c 1531 }
6216f94e 1532
6fb5fa3c
DB
1533 /* An insn can be deleted if every position of every one of
1534 its s_infos is zero. */
1b6fa860 1535 if (any_positions_needed_p (s_info))
60564289 1536 del = false;
8dd5516b 1537
60564289 1538 if (del)
6fb5fa3c
DB
1539 {
1540 insn_info_t insn_to_delete = ptr;
b8698a0f 1541
dabd47e7 1542 active_local_stores_len--;
6fb5fa3c
DB
1543 if (last)
1544 last->next_local_store = ptr->next_local_store;
1545 else
1546 active_local_stores = ptr->next_local_store;
b8698a0f 1547
1b6fa860
JJ
1548 if (!insn_to_delete->cannot_delete)
1549 delete_dead_store_insn (insn_to_delete);
6fb5fa3c
DB
1550 }
1551 else
1552 last = ptr;
b8698a0f 1553
6fb5fa3c
DB
1554 ptr = next;
1555 }
b8698a0f 1556
6fb5fa3c
DB
1557 /* Finish filling in the store_info. */
1558 store_info->next = insn_info->store_rec;
1559 insn_info->store_rec = store_info;
6216f94e 1560 store_info->mem = mem;
6216f94e 1561 store_info->mem_addr = mem_addr;
6fb5fa3c 1562 store_info->cse_base = base;
8dd5516b
JJ
1563 if (width > HOST_BITS_PER_WIDE_INT)
1564 {
1565 store_info->is_large = true;
1566 store_info->positions_needed.large.count = 0;
3f9b14ff 1567 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
8dd5516b
JJ
1568 }
1569 else
1570 {
1571 store_info->is_large = false;
1572 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1573 }
6fb5fa3c 1574 store_info->group_id = group_id;
71d6a386
RS
1575 store_info->offset = offset;
1576 store_info->width = width;
6fb5fa3c 1577 store_info->is_set = GET_CODE (body) == SET;
8dd5516b
JJ
1578 store_info->rhs = rhs;
1579 store_info->const_rhs = const_rhs;
1580 store_info->redundant_reason = redundant_reason;
6fb5fa3c 1581
6fb5fa3c
DB
1582 /* If this is a clobber, we return 0. We will only be able to
1583 delete this insn if there is only one USED store, but we
1584 can use the clobber to delete other stores earlier. */
1585 return store_info->is_set ? 1 : 0;
1586}
1587
1588
1589static void
1590dump_insn_info (const char * start, insn_info_t insn_info)
1591{
b8698a0f 1592 fprintf (dump_file, "%s insn=%d %s\n", start,
6fb5fa3c
DB
1593 INSN_UID (insn_info->insn),
1594 insn_info->store_rec ? "has store" : "naked");
1595}
1596
1597
8660aaae
EC
1598/* If the modes are different and the value's source and target do not
1599 line up, we need to extract the value from the lower part of the rhs of
1600 the store, shift it, and then put it into a form that can be shoved
1601 into the read_insn. This function generates a right shift by SHIFT bits
1602 of a value in a mode that is at least ACCESS_SIZE bytes wide and then
1603 converts the result to READ_MODE. The resulting value is returned, or
1604 NULL if we failed to find a usable shift. */
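/* As a concrete illustration (values taken from the caller
   get_stored_val below): for a little-endian read of the upper
   SImode half of a DImode store, SHIFT is 32 and ACCESS_SIZE is 8,
   and the loop below then looks for an integer mode of at least 8
   bytes, no wider than a word, in which a single LSHIFTRT by 32
   costs no more than one insn. */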
1605
1606static rtx
18b526e8 1607find_shift_sequence (int access_size,
fd6daec9 1608 store_info *store_info,
ef4bddc2 1609 machine_mode read_mode,
8dd5516b 1610 int shift, bool speed, bool require_cst)
8660aaae 1611{
ef4bddc2 1612 machine_mode store_mode = GET_MODE (store_info->mem);
f67f4dff 1613 scalar_int_mode new_mode;
18b526e8 1614 rtx read_reg = NULL;
8660aaae
EC
1615
1616 /* Some machines like the x86 have shift insns for each size of
1617 operand. Other machines like the ppc or the ia-64 may only have
1618 shift insns that shift values within 32 or 64 bit registers.
1619 This loop tries to find the smallest shift insn that will right
1620 justify the value we want to read but is available in one insn on
1621 the machine. */
1622
f67f4dff
RS
1623 opt_scalar_int_mode new_mode_iter;
1624 FOR_EACH_MODE_FROM (new_mode_iter,
1625 smallest_int_mode_for_size (access_size * BITS_PER_UNIT))
8660aaae 1626 {
dc01c3d1
DM
1627 rtx target, new_reg, new_lhs;
1628 rtx_insn *shift_seq, *insn;
d898d29b 1629 int cost;
348eea5f 1630
f67f4dff 1631 new_mode = new_mode_iter.require ();
c94843d2
RS
1632 if (GET_MODE_BITSIZE (new_mode) > BITS_PER_WORD)
1633 break;
1634
72a2609f
JJ
1635 /* If a constant was stored into memory, try to simplify it here,
1636 otherwise the cost of the shift might preclude this optimization
1637 e.g. at -Os, even when no actual shift will be needed. */
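 /* An illustrative case: if an SImode store of the constant
    0x12345678 is followed by a HImode read of its upper half on a
    little-endian target, the subreg and LSHIFTRT folding below
    yields the constant 0x1234 directly, so the load can be replaced
    without generating any shift insns. */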
8dd5516b 1638 if (store_info->const_rhs)
72a2609f
JJ
1639 {
1640 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
8dd5516b
JJ
1641 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1642 store_mode, byte);
72a2609f
JJ
1643 if (ret && CONSTANT_P (ret))
1644 {
1645 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
b4ddce36 1646 ret, GEN_INT (shift));
72a2609f
JJ
1647 if (ret && CONSTANT_P (ret))
1648 {
1649 byte = subreg_lowpart_offset (read_mode, new_mode);
1650 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1651 if (ret && CONSTANT_P (ret)
e548c9df
AM
1652 && (set_src_cost (ret, read_mode, speed)
1653 <= COSTS_N_INSNS (1)))
72a2609f
JJ
1654 return ret;
1655 }
1656 }
1657 }
1658
8dd5516b
JJ
1659 if (require_cst)
1660 return NULL_RTX;
1661
18b526e8
RS
1662 /* Try a wider mode if truncating the store mode to NEW_MODE
1663 requires a real instruction. */
1664 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
d0edd768 1665 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
348eea5f
RS
1666 continue;
1667
18b526e8
RS
1668 /* Also try a wider mode if the necessary punning is either not
1669 desirable or not possible. */
1670 if (!CONSTANT_P (store_info->rhs)
99e1629f 1671 && !targetm.modes_tieable_p (new_mode, store_mode))
18b526e8 1672 continue;
18b526e8 1673
348eea5f 1674 new_reg = gen_reg_rtx (new_mode);
8660aaae
EC
1675
1676 start_sequence ();
1677
1678 /* In theory we could also check for an ashr. Ian Taylor knows
1679 of one dsp where the cost of these two was not the same. But
1680 this really is a rare case anyway. */
1681 target = expand_binop (new_mode, lshr_optab, new_reg,
b4ddce36 1682 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
8660aaae 1683
c6f3019a
RS
1684 shift_seq = get_insns ();
1685 end_sequence ();
8660aaae 1686
c6f3019a
RS
1687 if (target != new_reg || shift_seq == NULL)
1688 continue;
1689
1690 cost = 0;
1691 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1692 if (INSN_P (insn))
d09e78e8 1693 cost += insn_cost (insn, speed);
c6f3019a
RS
1694
1695 /* The computation up to here is essentially independent
1696 of the arguments and could be precomputed. It may
1697 not be worth doing so. We could precompute if
1698 worthwhile or at least cache the results. The result
06acf7d0
RS
1699 technically depends on both SHIFT and ACCESS_SIZE,
1700 but in practice the answer will depend only on ACCESS_SIZE. */
c6f3019a
RS
1701
1702 if (cost > COSTS_N_INSNS (1))
1703 continue;
1704
d898d29b
JJ
1705 new_lhs = extract_low_bits (new_mode, store_mode,
1706 copy_rtx (store_info->rhs));
1707 if (new_lhs == NULL_RTX)
1708 continue;
1709
c6f3019a
RS
1710 /* We found an acceptable shift. Generate a move to
1711 take the value from the store and put it into the
1712 shift pseudo, then shift it, then generate another
1713 move to put it into the target of the read. */
18b526e8 1714 emit_move_insn (new_reg, new_lhs);
c6f3019a 1715 emit_insn (shift_seq);
18b526e8 1716 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
c6f3019a 1717 break;
8660aaae
EC
1718 }
1719
18b526e8 1720 return read_reg;
8660aaae
EC
1721}
1722
1723
02b47899
KZ
1724/* Call back for note_stores to find the hard regs set or clobbered by
1725 insn. Data is a bitmap of the hardregs set so far. */
1726
1727static void
1728look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1729{
1730 bitmap regs_set = (bitmap) data;
1731
1732 if (REG_P (x)
f773c2bd 1733 && HARD_REGISTER_P (x))
dc8afb70 1734 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
02b47899
KZ
1735}
1736
8dd5516b 1737/* Helper function for replace_read and record_store.
71d6a386
RS
1738 Attempt to return a value of mode READ_MODE stored in STORE_INFO,
1739 consisting of READ_WIDTH bytes starting from READ_OFFSET. Return NULL
8dd5516b
JJ
1740 if not successful. If REQUIRE_CST is true, only a constant is returned. */
1741
1742static rtx
fd6daec9 1743get_stored_val (store_info *store_info, machine_mode read_mode,
71d6a386 1744 HOST_WIDE_INT read_offset, HOST_WIDE_INT read_width,
8dd5516b
JJ
1745 basic_block bb, bool require_cst)
1746{
ef4bddc2 1747 machine_mode store_mode = GET_MODE (store_info->mem);
71d6a386 1748 HOST_WIDE_INT gap;
8dd5516b
JJ
1749 rtx read_reg;
1750
1751 /* To get here the read is within the boundaries of the write so
1752 shift will never be negative. Start out with the shift being in
1753 bytes. */
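 /* For instance, with an 8-byte store at offset 0 and a 2-byte read
    at offset 4, GAP is 4 on a little-endian target and
    (0 + 8) - (4 + 2) = 2 on a big-endian one; either way the read
    starts GAP bytes above the least significant end of the stored
    value, so a right shift by GAP * BITS_PER_UNIT lines it up. */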
1754 if (store_mode == BLKmode)
71d6a386 1755 gap = 0;
8dd5516b 1756 else if (BYTES_BIG_ENDIAN)
71d6a386
RS
1757 gap = ((store_info->offset + store_info->width)
1758 - (read_offset + read_width));
8dd5516b 1759 else
71d6a386 1760 gap = read_offset - store_info->offset;
8dd5516b 1761
71d6a386
RS
1762 if (gap != 0)
1763 {
1764 HOST_WIDE_INT shift = gap * BITS_PER_UNIT;
1765 HOST_WIDE_INT access_size = GET_MODE_SIZE (read_mode) + gap;
1766 read_reg = find_shift_sequence (access_size, store_info, read_mode,
1767 shift, optimize_bb_for_speed_p (bb),
1768 require_cst);
1769 }
8dd5516b
JJ
1770 else if (store_mode == BLKmode)
1771 {
1772 /* The store is a memset (addr, const_val, const_size). */
1773 gcc_assert (CONST_INT_P (store_info->rhs));
304b9962
RS
1774 scalar_int_mode int_store_mode;
1775 if (!int_mode_for_mode (read_mode).exists (&int_store_mode))
8dd5516b
JJ
1776 read_reg = NULL_RTX;
1777 else if (store_info->rhs == const0_rtx)
304b9962
RS
1778 read_reg = extract_low_bits (read_mode, int_store_mode, const0_rtx);
1779 else if (GET_MODE_BITSIZE (int_store_mode) > HOST_BITS_PER_WIDE_INT
8dd5516b
JJ
1780 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1781 read_reg = NULL_RTX;
1782 else
1783 {
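 /* Replicate the low memset byte across the whole HOST_WIDE_INT,
    e.g. a stored byte of 0x5a becomes 0x5a5a5a5a5a5a5a5a on a
    64-bit host, so that the extraction below sees the value the
    memset would have produced in READ_MODE. */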
1784 unsigned HOST_WIDE_INT c
1785 = INTVAL (store_info->rhs)
fecfbfa4 1786 & ((HOST_WIDE_INT_1 << BITS_PER_UNIT) - 1);
8dd5516b
JJ
1787 int shift = BITS_PER_UNIT;
1788 while (shift < HOST_BITS_PER_WIDE_INT)
1789 {
1790 c |= (c << shift);
1791 shift <<= 1;
1792 }
304b9962
RS
1793 read_reg = gen_int_mode (c, int_store_mode);
1794 read_reg = extract_low_bits (read_mode, int_store_mode, read_reg);
8dd5516b
JJ
1795 }
1796 }
1797 else if (store_info->const_rhs
1798 && (require_cst
1799 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1800 read_reg = extract_low_bits (read_mode, store_mode,
1801 copy_rtx (store_info->const_rhs));
1802 else
1803 read_reg = extract_low_bits (read_mode, store_mode,
1804 copy_rtx (store_info->rhs));
1805 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1806 read_reg = NULL_RTX;
1807 return read_reg;
1808}
02b47899 1809
6fb5fa3c
DB
1810/* Take a sequence of:
1811 A <- r1
1812 ...
1813 ... <- A
1814
b8698a0f 1815 and change it into
6fb5fa3c
DB
1816 r2 <- r1
1817 A <- r1
1818 ...
1819 ... <- r2
1820
8660aaae
EC
1821 or
1822
1823 r3 <- extract (r1)
1824 r3 <- r3 >> shift
1825 r2 <- extract (r3)
1826 ... <- r2
1827
1828 or
1829
1830 r2 <- extract (r1)
1831 ... <- r2
1832
1833 Depending on the alignment and the mode of the store and
1834 subsequent load.
1835
1836
1837 The STORE_INFO and STORE_INSN are for the store and READ_INFO
6fb5fa3c
DB
1838 and READ_INSN are for the read. Return true if the replacement
1839 went ok. */
1840
1841static bool
fd6daec9 1842replace_read (store_info *store_info, insn_info_t store_insn,
8dd5516b
JJ
1843 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1844 bitmap regs_live)
6fb5fa3c 1845{
ef4bddc2
RS
1846 machine_mode store_mode = GET_MODE (store_info->mem);
1847 machine_mode read_mode = GET_MODE (read_info->mem);
dc01c3d1
DM
1848 rtx_insn *insns, *this_insn;
1849 rtx read_reg;
8dd5516b 1850 basic_block bb;
8660aaae 1851
6fb5fa3c
DB
1852 if (!dbg_cnt (dse))
1853 return false;
1854
18b526e8
RS
1855 /* Create a sequence of instructions to set up the read register.
1856 This sequence goes immediately before the store and its result
1857 is read by the load.
1858
1859 We need to keep this in perspective. We are replacing a read
8660aaae
EC
1860 with a sequence of insns, but the read will almost certainly be
1861 in cache, so it is not going to be an expensive one. Thus, we
1862 are not willing to do a multi insn shift or worse a subroutine
1863 call to get rid of the read. */
456610d3 1864 if (dump_file && (dump_flags & TDF_DETAILS))
18b526e8
RS
1865 fprintf (dump_file, "trying to replace %smode load in insn %d"
1866 " from %smode store in insn %d\n",
1867 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
1868 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
1869 start_sequence ();
8dd5516b
JJ
1870 bb = BLOCK_FOR_INSN (read_insn->insn);
1871 read_reg = get_stored_val (store_info,
71d6a386 1872 read_mode, read_info->offset, read_info->width,
8dd5516b 1873 bb, false);
18b526e8 1874 if (read_reg == NULL_RTX)
8660aaae 1875 {
18b526e8 1876 end_sequence ();
456610d3 1877 if (dump_file && (dump_flags & TDF_DETAILS))
18b526e8
RS
1878 fprintf (dump_file, " -- could not extract bits of stored value\n");
1879 return false;
8660aaae 1880 }
18b526e8
RS
1881 /* Force the value into a new register so that it won't be clobbered
1882 between the store and the load. */
1883 read_reg = copy_to_mode_reg (read_mode, read_reg);
1884 insns = get_insns ();
1885 end_sequence ();
8660aaae 1886
02b47899
KZ
1887 if (insns != NULL_RTX)
1888 {
1889 /* Now we have to scan the set of new instructions to see if the
1890 sequence contains any sets of hardregs that happened to be
1891 live at this point. For instance, this can happen if one of
1892 the insns sets the CC and the CC happened to be live at that
1893 point. This does occasionally happen, see PR 37922. */
3f9b14ff 1894 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
02b47899
KZ
1895
1896 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
1897 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
b8698a0f 1898
02b47899
KZ
1899 bitmap_and_into (regs_set, regs_live);
1900 if (!bitmap_empty_p (regs_set))
1901 {
456610d3 1902 if (dump_file && (dump_flags & TDF_DETAILS))
02b47899 1903 {
b8698a0f 1904 fprintf (dump_file,
02b47899
KZ
1905 "abandoning replacement because sequence clobbers live hardregs:");
1906 df_print_regset (dump_file, regs_set);
1907 }
b8698a0f 1908
02b47899
KZ
1909 BITMAP_FREE (regs_set);
1910 return false;
1911 }
1912 BITMAP_FREE (regs_set);
1913 }
1914
8660aaae 1915 if (validate_change (read_insn->insn, loc, read_reg, 0))
6fb5fa3c 1916 {
fd6daec9 1917 deferred_change *change = deferred_change_pool.allocate ();
b8698a0f 1918
8660aaae
EC
1919 /* Insert this right before the store insn where it will be safe
1920 from later insns that might change it before the read. */
1921 emit_insn_before (insns, store_insn->insn);
b8698a0f 1922
8660aaae
EC
1923 /* And now for the kludge part: cselib croaks if you just
1924 return at this point. There are two reasons for this:
b8698a0f 1925
8660aaae
EC
1926 1) Cselib has an idea of how many pseudos there are and
1927 that does not include the new ones we just added.
b8698a0f 1928
8660aaae
EC
1929 2) Cselib does not know about the move insn we added
1930 above the store_info, and there is no way to tell it
1931 about it, because it has "moved on".
b8698a0f 1932
8660aaae
EC
1933 Problem (1) is fixable with a certain amount of engineering.
1934 Problem (2) requires starting the bb from scratch. This
1935 could be expensive.
b8698a0f 1936
8660aaae
EC
1937 So we are just going to have to lie. The move/extraction
1938 insns are not really an issue, cselib did not see them. But
1939 the use of the new pseudo read_insn is a real problem because
1940 cselib has not scanned this insn. The way that we solve this
1941 problem is that we are just going to put the mem back for now
1942 and when we are finished with the block, we undo this. We
1943 keep a table of mems to get rid of. At the end of the basic
1944 block we can put them back. */
b8698a0f 1945
8660aaae 1946 *loc = read_info->mem;
da6603c6
ML
1947 change->next = deferred_change_list;
1948 deferred_change_list = change;
1949 change->loc = loc;
1950 change->reg = read_reg;
b8698a0f 1951
8660aaae
EC
1952 /* Get rid of the read_info, from the point of view of the
1953 rest of dse, play like this read never happened. */
1954 read_insn->read_rec = read_info->next;
fb0b2914 1955 read_info_type_pool.remove (read_info);
456610d3 1956 if (dump_file && (dump_flags & TDF_DETAILS))
18b526e8
RS
1957 {
1958 fprintf (dump_file, " -- replaced the loaded MEM with ");
1959 print_simple_rtl (dump_file, read_reg);
1960 fprintf (dump_file, "\n");
1961 }
8660aaae 1962 return true;
6fb5fa3c 1963 }
b8698a0f 1964 else
6fb5fa3c 1965 {
456610d3 1966 if (dump_file && (dump_flags & TDF_DETAILS))
18b526e8
RS
1967 {
1968 fprintf (dump_file, " -- replacing the loaded MEM with ");
1969 print_simple_rtl (dump_file, read_reg);
1970 fprintf (dump_file, " led to an invalid instruction\n");
1971 }
6fb5fa3c
DB
1972 return false;
1973 }
1974}
1975
d7111da8
RS
1976/* Check the address of MEM *LOC and kill any appropriate stores that may
1977 be active. */
6fb5fa3c 1978
d7111da8
RS
1979static void
1980check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
6fb5fa3c 1981{
6216f94e 1982 rtx mem = *loc, mem_addr;
6fb5fa3c
DB
1983 insn_info_t insn_info;
1984 HOST_WIDE_INT offset = 0;
1985 HOST_WIDE_INT width = 0;
b8698a0f 1986 cselib_val *base = NULL;
6fb5fa3c
DB
1987 int group_id;
1988 read_info_t read_info;
1989
6fb5fa3c
DB
1990 insn_info = bb_info->last_insn;
1991
1992 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
1993 || (MEM_VOLATILE_P (mem)))
1994 {
456610d3 1995 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
1996 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
1997 add_wild_read (bb_info);
1998 insn_info->cannot_delete = true;
d7111da8 1999 return;
6fb5fa3c
DB
2000 }
2001
2002 /* If it is reading readonly mem, then there can be no conflict with
2003 another write. */
2004 if (MEM_READONLY_P (mem))
d7111da8 2005 return;
6fb5fa3c 2006
ac6929b5 2007 if (!canon_address (mem, &group_id, &offset, &base))
6fb5fa3c 2008 {
456610d3 2009 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
2010 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2011 add_wild_read (bb_info);
d7111da8 2012 return;
6fb5fa3c
DB
2013 }
2014
2015 if (GET_MODE (mem) == BLKmode)
2016 width = -1;
2017 else
2018 width = GET_MODE_SIZE (GET_MODE (mem));
2019
213ffde2
JJ
2020 if (width == -1
2021 ? offset == HOST_WIDE_INT_MIN
2022 : offset > HOST_WIDE_INT_MAX - width)
938f9248 2023 {
213ffde2
JJ
2024 if (dump_file && (dump_flags & TDF_DETAILS))
2025 fprintf (dump_file, " adding wild read, due to overflow.\n");
2026 add_wild_read (bb_info);
938f9248
ML
2027 return;
2028 }
2029
fb0b2914 2030 read_info = read_info_type_pool.allocate ();
6fb5fa3c
DB
2031 read_info->group_id = group_id;
2032 read_info->mem = mem;
71d6a386
RS
2033 read_info->offset = offset;
2034 read_info->width = width;
6fb5fa3c
DB
2035 read_info->next = insn_info->read_rec;
2036 insn_info->read_rec = read_info;
ac6929b5
RB
2037 if (group_id < 0)
2038 mem_addr = base->val_rtx;
6216f94e
JJ
2039 else
2040 {
ac6929b5
RB
2041 group_info *group = rtx_group_vec[group_id];
2042 mem_addr = group->canon_base_addr;
6216f94e 2043 }
ac6929b5
RB
2044 if (offset)
2045 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
6fb5fa3c 2046
ac6929b5 2047 if (group_id >= 0)
6fb5fa3c
DB
2048 {
2049 /* This is the restricted case where the base is a constant or
2050 the frame pointer and offset is a constant. */
2051 insn_info_t i_ptr = active_local_stores;
2052 insn_info_t last = NULL;
b8698a0f 2053
456610d3 2054 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
2055 {
2056 if (width == -1)
2057 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2058 group_id);
2059 else
71d6a386
RS
2060 {
2061 fprintf (dump_file, " processing const load gid=%d", group_id);
2062 print_range (dump_file, offset, width);
2063 fprintf (dump_file, "\n");
2064 }
6fb5fa3c
DB
2065 }
2066
2067 while (i_ptr)
2068 {
2069 bool remove = false;
fd6daec9 2070 store_info *store_info = i_ptr->store_rec;
b8698a0f 2071
6fb5fa3c
DB
2072 /* Skip the clobbers. */
2073 while (!store_info->is_set)
2074 store_info = store_info->next;
b8698a0f 2075
6fb5fa3c
DB
2076 /* There are three cases here. */
2077 if (store_info->group_id < 0)
2078 /* We have a cselib store followed by a read from a
2079 const base. */
b8698a0f
L
2080 remove
2081 = canon_true_dependence (store_info->mem,
6fb5fa3c
DB
2082 GET_MODE (store_info->mem),
2083 store_info->mem_addr,
53d9622b 2084 mem, mem_addr);
b8698a0f 2085
6fb5fa3c
DB
2086 else if (group_id == store_info->group_id)
2087 {
2088 /* This is a block mode load. We may get lucky and
2089 canon_true_dependence may save the day. */
2090 if (width == -1)
b8698a0f
L
2091 remove
2092 = canon_true_dependence (store_info->mem,
6fb5fa3c
DB
2093 GET_MODE (store_info->mem),
2094 store_info->mem_addr,
53d9622b 2095 mem, mem_addr);
b8698a0f 2096
6fb5fa3c
DB
2097 /* If this read is just reading back something that we just
2098 stored, rewrite the read. */
b8698a0f 2099 else
6fb5fa3c
DB
2100 {
2101 if (store_info->rhs
71d6a386
RS
2102 && known_subrange_p (offset, width, store_info->offset,
2103 store_info->width)
8dd5516b 2104 && all_positions_needed_p (store_info,
71d6a386 2105 offset - store_info->offset,
8dd5516b
JJ
2106 width)
2107 && replace_read (store_info, i_ptr, read_info,
2108 insn_info, loc, bb_info->regs_live))
d7111da8 2109 return;
8dd5516b 2110
6fb5fa3c 2111 /* The bases are the same, just see if the offsets
71d6a386
RS
2112 could overlap. */
2113 if (ranges_maybe_overlap_p (offset, width,
2114 store_info->offset,
2115 store_info->width))
6fb5fa3c
DB
2116 remove = true;
2117 }
2118 }
b8698a0f
L
2119
2120 /* else
6fb5fa3c
DB
2121 The else case that is missing here is that the
2122 bases are constant but different. There is nothing
2123 to do here because there is no overlap. */
b8698a0f 2124
6fb5fa3c
DB
2125 if (remove)
2126 {
456610d3 2127 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
2128 dump_insn_info ("removing from active", i_ptr);
2129
dabd47e7 2130 active_local_stores_len--;
6fb5fa3c
DB
2131 if (last)
2132 last->next_local_store = i_ptr->next_local_store;
2133 else
2134 active_local_stores = i_ptr->next_local_store;
2135 }
2136 else
2137 last = i_ptr;
2138 i_ptr = i_ptr->next_local_store;
2139 }
2140 }
b8698a0f 2141 else
6fb5fa3c
DB
2142 {
2143 insn_info_t i_ptr = active_local_stores;
2144 insn_info_t last = NULL;
456610d3 2145 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
2146 {
2147 fprintf (dump_file, " processing cselib load mem:");
2148 print_inline_rtx (dump_file, mem, 0);
2149 fprintf (dump_file, "\n");
2150 }
2151
2152 while (i_ptr)
2153 {
2154 bool remove = false;
fd6daec9 2155 store_info *store_info = i_ptr->store_rec;
b8698a0f 2156
456610d3 2157 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
2158 fprintf (dump_file, " processing cselib load against insn %d\n",
2159 INSN_UID (i_ptr->insn));
2160
2161 /* Skip the clobbers. */
2162 while (!store_info->is_set)
2163 store_info = store_info->next;
2164
2165 /* If this read is just reading back something that we just
2166 stored, rewrite the read. */
2167 if (store_info->rhs
2168 && store_info->group_id == -1
2169 && store_info->cse_base == base
71d6a386
RS
2170 && known_subrange_p (offset, width, store_info->offset,
2171 store_info->width)
8dd5516b 2172 && all_positions_needed_p (store_info,
71d6a386 2173 offset - store_info->offset, width)
8dd5516b
JJ
2174 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2175 bb_info->regs_live))
d7111da8 2176 return;
6fb5fa3c 2177
ac6929b5
RB
2178 remove = canon_true_dependence (store_info->mem,
2179 GET_MODE (store_info->mem),
2180 store_info->mem_addr,
2181 mem, mem_addr);
b8698a0f 2182
6fb5fa3c
DB
2183 if (remove)
2184 {
456610d3 2185 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c 2186 dump_insn_info ("removing from active", i_ptr);
b8698a0f 2187
dabd47e7 2188 active_local_stores_len--;
6fb5fa3c
DB
2189 if (last)
2190 last->next_local_store = i_ptr->next_local_store;
2191 else
2192 active_local_stores = i_ptr->next_local_store;
2193 }
2194 else
2195 last = i_ptr;
2196 i_ptr = i_ptr->next_local_store;
2197 }
2198 }
6fb5fa3c
DB
2199}
2200
d7111da8 2201/* A note_uses callback in which DATA points to the bb_info_t to be
6fb5fa3c
DB
2202 passed to check_mem_read_rtx. Check the address of every MEM
2203 appearing anywhere in *LOC against the active local stores. */
2204
2205static void
2206check_mem_read_use (rtx *loc, void *data)
2207{
d7111da8
RS
2208 subrtx_ptr_iterator::array_type array;
2209 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2210 {
2211 rtx *loc = *iter;
2212 if (MEM_P (*loc))
2213 check_mem_read_rtx (loc, (bb_info_t) data);
2214 }
6fb5fa3c
DB
2215}
2216
8dd5516b
JJ
2217
2218/* Get arguments passed to CALL_INSN. Return TRUE if successful.
2219 So far it only handles arguments passed in registers. */
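/* For the memset case below, NARGS is 3 and ARGS receives the
   destination address, the fill value and the length, as recovered
   from CALL_INSN_FUNCTION_USAGE. */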
2220
2221static bool
2222get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2223{
d5cc9181
JR
2224 CUMULATIVE_ARGS args_so_far_v;
2225 cumulative_args_t args_so_far;
8dd5516b
JJ
2226 tree arg;
2227 int idx;
2228
d5cc9181
JR
2229 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2230 args_so_far = pack_cumulative_args (&args_so_far_v);
8dd5516b
JJ
2231
2232 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2233 for (idx = 0;
2234 arg != void_list_node && idx < nargs;
2235 arg = TREE_CHAIN (arg), idx++)
2236 {
b4206259 2237 scalar_int_mode mode;
3c07301f 2238 rtx reg, link, tmp;
b4206259
RS
2239
2240 if (!is_int_mode (TYPE_MODE (TREE_VALUE (arg)), &mode))
2241 return false;
2242
d5cc9181 2243 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
b4206259 2244 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode)
8dd5516b
JJ
2245 return false;
2246
2247 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2248 link;
2249 link = XEXP (link, 1))
2250 if (GET_CODE (XEXP (link, 0)) == USE)
2251 {
b4206259 2252 scalar_int_mode arg_mode;
8dd5516b
JJ
2253 args[idx] = XEXP (XEXP (link, 0), 0);
2254 if (REG_P (args[idx])
2255 && REGNO (args[idx]) == REGNO (reg)
2256 && (GET_MODE (args[idx]) == mode
b4206259
RS
2257 || (is_int_mode (GET_MODE (args[idx]), &arg_mode)
2258 && (GET_MODE_SIZE (arg_mode) <= UNITS_PER_WORD)
2259 && (GET_MODE_SIZE (arg_mode) > GET_MODE_SIZE (mode)))))
8dd5516b
JJ
2260 break;
2261 }
2262 if (!link)
2263 return false;
2264
2265 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2266 if (GET_MODE (args[idx]) != mode)
2267 {
2268 if (!tmp || !CONST_INT_P (tmp))
2269 return false;
6d26322f 2270 tmp = gen_int_mode (INTVAL (tmp), mode);
8dd5516b
JJ
2271 }
2272 if (tmp)
2273 args[idx] = tmp;
2274
d5cc9181 2275 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
8dd5516b
JJ
2276 }
2277 if (arg != void_list_node || idx != nargs)
2278 return false;
2279 return true;
2280}
2281
9e582b1d
JR
2282/* Return a bitmap of the fixed registers contained in IN. */
2283
2284static bitmap
2285copy_fixed_regs (const_bitmap in)
2286{
2287 bitmap ret;
2288
2289 ret = ALLOC_REG_SET (NULL);
2290 bitmap_and (ret, in, fixed_reg_set_regset);
2291 return ret;
2292}
8dd5516b 2293
6fb5fa3c
DB
2294/* Apply record_store to all candidate stores in INSN. Mark INSN
2295 if some part of it is not a candidate store and assigns to a
2296 non-register target. */
2297
2298static void
dd60a84c 2299scan_insn (bb_info_t bb_info, rtx_insn *insn)
6fb5fa3c
DB
2300{
2301 rtx body;
fb0b2914 2302 insn_info_type *insn_info = insn_info_type_pool.allocate ();
6fb5fa3c 2303 int mems_found = 0;
da6603c6 2304 memset (insn_info, 0, sizeof (struct insn_info_type));
6fb5fa3c 2305
456610d3 2306 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c
DB
2307 fprintf (dump_file, "\n**scanning insn=%d\n",
2308 INSN_UID (insn));
2309
2310 insn_info->prev_insn = bb_info->last_insn;
2311 insn_info->insn = insn;
2312 bb_info->last_insn = insn_info;
b8698a0f 2313
b5b8b0ac
AO
2314 if (DEBUG_INSN_P (insn))
2315 {
2316 insn_info->cannot_delete = true;
2317 return;
2318 }
6fb5fa3c 2319
6fb5fa3c
DB
2320 /* Look at all of the uses in the insn. */
2321 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2322
2323 if (CALL_P (insn))
2324 {
8dd5516b 2325 bool const_call;
ee516de9 2326 rtx call, sym;
8dd5516b
JJ
2327 tree memset_call = NULL_TREE;
2328
6fb5fa3c 2329 insn_info->cannot_delete = true;
50f0f366 2330
6fb5fa3c 2331 /* Const functions cannot do anything bad, i.e. read memory;
50f0f366 2332 however, they can read their parameters, which may have
8dd5516b
JJ
2333 been pushed onto the stack.
2334 memset and bzero don't read memory either. */
2335 const_call = RTL_CONST_CALL_P (insn);
ee516de9
EB
2336 if (!const_call
2337 && (call = get_call_rtx_from (insn))
2338 && (sym = XEXP (XEXP (call, 0), 0))
2339 && GET_CODE (sym) == SYMBOL_REF
2340 && SYMBOL_REF_DECL (sym)
2341 && TREE_CODE (SYMBOL_REF_DECL (sym)) == FUNCTION_DECL
2342 && DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (sym)) == BUILT_IN_NORMAL
2343 && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (sym)) == BUILT_IN_MEMSET)
2344 memset_call = SYMBOL_REF_DECL (sym);
2345
8dd5516b 2346 if (const_call || memset_call)
6fb5fa3c
DB
2347 {
2348 insn_info_t i_ptr = active_local_stores;
2349 insn_info_t last = NULL;
2350
456610d3 2351 if (dump_file && (dump_flags & TDF_DETAILS))
8dd5516b
JJ
2352 fprintf (dump_file, "%s call %d\n",
2353 const_call ? "const" : "memset", INSN_UID (insn));
6fb5fa3c 2354
64520bdc 2355 /* See the head comment of the frame_read field. */
57534689
JJ
2356 if (reload_completed
2357 /* Tail calls are storing their arguments using
2358 arg pointer. If it is a frame pointer on the target,
2359 even before reload we need to kill frame pointer based
2360 stores. */
2361 || (SIBLING_CALL_P (insn)
2362 && HARD_FRAME_POINTER_IS_ARG_POINTER))
64520bdc
EB
2363 insn_info->frame_read = true;
2364
2365 /* Loop over the active stores and remove those which are
2366 killed by the const function call. */
6fb5fa3c
DB
2367 while (i_ptr)
2368 {
64520bdc
EB
2369 bool remove_store = false;
2370
2371 /* The stack pointer based stores are always killed. */
50f0f366 2372 if (i_ptr->stack_pointer_based)
64520bdc
EB
2373 remove_store = true;
2374
2375 /* If the frame is read, the frame related stores are killed. */
2376 else if (insn_info->frame_read)
2377 {
fd6daec9 2378 store_info *store_info = i_ptr->store_rec;
64520bdc
EB
2379
2380 /* Skip the clobbers. */
2381 while (!store_info->is_set)
2382 store_info = store_info->next;
2383
2384 if (store_info->group_id >= 0
9771b263 2385 && rtx_group_vec[store_info->group_id]->frame_related)
64520bdc
EB
2386 remove_store = true;
2387 }
2388
2389 if (remove_store)
6fb5fa3c 2390 {
456610d3 2391 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c 2392 dump_insn_info ("removing from active", i_ptr);
b8698a0f 2393
dabd47e7 2394 active_local_stores_len--;
6fb5fa3c
DB
2395 if (last)
2396 last->next_local_store = i_ptr->next_local_store;
2397 else
2398 active_local_stores = i_ptr->next_local_store;
2399 }
2400 else
2401 last = i_ptr;
64520bdc 2402
6fb5fa3c
DB
2403 i_ptr = i_ptr->next_local_store;
2404 }
8dd5516b
JJ
2405
2406 if (memset_call)
2407 {
2408 rtx args[3];
2409 if (get_call_args (insn, memset_call, args, 3)
2410 && CONST_INT_P (args[1])
2411 && CONST_INT_P (args[2])
2412 && INTVAL (args[2]) > 0)
2413 {
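 /* Model the call as a single BLKmode store of the fill value
    ARGS[1] to ARGS[2] bytes at address ARGS[0]; e.g. memset (p, 0, 32)
    is recorded below as a 32-byte BLKmode store of (const_int 0)
    at address p. */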
2414 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
f5541398 2415 set_mem_size (mem, INTVAL (args[2]));
f7df4a84 2416 body = gen_rtx_SET (mem, args[1]);
8dd5516b 2417 mems_found += record_store (body, bb_info);
456610d3 2418 if (dump_file && (dump_flags & TDF_DETAILS))
8dd5516b
JJ
2419 fprintf (dump_file, "handling memset as BLKmode store\n");
2420 if (mems_found == 1)
2421 {
dabd47e7
JJ
2422 if (active_local_stores_len++
2423 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2424 {
2425 active_local_stores_len = 1;
2426 active_local_stores = NULL;
2427 }
9e582b1d
JR
2428 insn_info->fixed_regs_live
2429 = copy_fixed_regs (bb_info->regs_live);
8dd5516b
JJ
2430 insn_info->next_local_store = active_local_stores;
2431 active_local_stores = insn_info;
2432 }
2433 }
a2ad54a0
JJ
2434 else
2435 clear_rhs_from_active_local_stores ();
8dd5516b 2436 }
6fb5fa3c 2437 }
57534689
JJ
2438 else if (SIBLING_CALL_P (insn) && reload_completed)
2439 /* Arguments for a sibling call that are pushed to memory are passed
2440 using the incoming argument pointer of the current function. After
2441 reload that might be (and likely is) frame pointer based. */
2442 add_wild_read (bb_info);
50f0f366 2443 else
d26c7090
ER
2444 /* Every other call, including pure functions, may read any memory
2445 that is not relative to the frame. */
2446 add_non_frame_wild_read (bb_info);
50f0f366 2447
6fb5fa3c
DB
2448 return;
2449 }
2450
2451 /* Assuming that there are sets in these insns, we cannot delete
2452 them. */
2453 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
0a64eeca 2454 || volatile_refs_p (PATTERN (insn))
2da02156 2455 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
6fb5fa3c
DB
2456 || (RTX_FRAME_RELATED_P (insn))
2457 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2458 insn_info->cannot_delete = true;
b8698a0f 2459
6fb5fa3c
DB
2460 body = PATTERN (insn);
2461 if (GET_CODE (body) == PARALLEL)
2462 {
2463 int i;
2464 for (i = 0; i < XVECLEN (body, 0); i++)
2465 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2466 }
2467 else
2468 mems_found += record_store (body, bb_info);
2469
456610d3 2470 if (dump_file && (dump_flags & TDF_DETAILS))
b8698a0f 2471 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
6fb5fa3c
DB
2472 mems_found, insn_info->cannot_delete ? "true" : "false");
2473
8dd5516b
JJ
2474 /* If we found exactly one set of a mem, add the insn to active_local_stores so
2475 that it can be locally deleted if found dead, or used for
2476 replace_read and redundant constant store elimination. Otherwise mark
2477 the insn as cannot_delete. This simplifies the processing later. */
2478 if (mems_found == 1)
6fb5fa3c 2479 {
dabd47e7
JJ
2480 if (active_local_stores_len++
2481 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2482 {
2483 active_local_stores_len = 1;
2484 active_local_stores = NULL;
2485 }
9e582b1d 2486 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
6fb5fa3c
DB
2487 insn_info->next_local_store = active_local_stores;
2488 active_local_stores = insn_info;
2489 }
2490 else
2491 insn_info->cannot_delete = true;
2492}
2493
2494
2495/* Remove BASE from the set of active_local_stores. This is a
2496 callback from cselib that is used to get rid of the stores in
2497 active_local_stores. */
2498
2499static void
2500remove_useless_values (cselib_val *base)
2501{
2502 insn_info_t insn_info = active_local_stores;
2503 insn_info_t last = NULL;
2504
2505 while (insn_info)
2506 {
fd6daec9 2507 store_info *store_info = insn_info->store_rec;
60564289 2508 bool del = false;
6fb5fa3c
DB
2509
2510 /* If ANY of the store_infos match the cselib group that is
2511 being deleted, then the insn cannot be deleted. */
2512 while (store_info)
2513 {
b8698a0f 2514 if ((store_info->group_id == -1)
6fb5fa3c
DB
2515 && (store_info->cse_base == base))
2516 {
60564289 2517 del = true;
6fb5fa3c
DB
2518 break;
2519 }
2520 store_info = store_info->next;
2521 }
2522
60564289 2523 if (del)
6fb5fa3c 2524 {
dabd47e7 2525 active_local_stores_len--;
6fb5fa3c
DB
2526 if (last)
2527 last->next_local_store = insn_info->next_local_store;
2528 else
2529 active_local_stores = insn_info->next_local_store;
2530 free_store_info (insn_info);
2531 }
2532 else
2533 last = insn_info;
b8698a0f 2534
6fb5fa3c
DB
2535 insn_info = insn_info->next_local_store;
2536 }
2537}
2538
2539
2540/* Do all of step 1. */
2541
2542static void
2543dse_step1 (void)
2544{
2545 basic_block bb;
3f9b14ff 2546 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
b8698a0f 2547
457eeaae 2548 cselib_init (0);
6fb5fa3c
DB
2549 all_blocks = BITMAP_ALLOC (NULL);
2550 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2551 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2552
04a90bec 2553 FOR_ALL_BB_FN (bb, cfun)
6fb5fa3c
DB
2554 {
2555 insn_info_t ptr;
fb0b2914 2556 bb_info_t bb_info = dse_bb_info_type_pool.allocate ();
6fb5fa3c 2557
da6603c6 2558 memset (bb_info, 0, sizeof (dse_bb_info_type));
6fb5fa3c 2559 bitmap_set_bit (all_blocks, bb->index);
02b47899
KZ
2560 bb_info->regs_live = regs_live;
2561
2562 bitmap_copy (regs_live, DF_LR_IN (bb));
2563 df_simulate_initialize_forwards (bb, regs_live);
6fb5fa3c
DB
2564
2565 bb_table[bb->index] = bb_info;
2566 cselib_discard_hook = remove_useless_values;
2567
2568 if (bb->index >= NUM_FIXED_BLOCKS)
2569 {
dd60a84c 2570 rtx_insn *insn;
6fb5fa3c 2571
6fb5fa3c 2572 active_local_stores = NULL;
dabd47e7 2573 active_local_stores_len = 0;
6fb5fa3c 2574 cselib_clear_table ();
b8698a0f 2575
6fb5fa3c
DB
2576 /* Scan the insns. */
2577 FOR_BB_INSNS (bb, insn)
2578 {
2579 if (INSN_P (insn))
2580 scan_insn (bb_info, insn);
2581 cselib_process_insn (insn);
02b47899
KZ
2582 if (INSN_P (insn))
2583 df_simulate_one_insn_forwards (bb, insn, regs_live);
6fb5fa3c 2584 }
b8698a0f 2585
6fb5fa3c
DB
2586 /* This is something of a hack, because the global algorithm
2587 is supposed to take care of the case where stores go dead
2588 at the end of the function. However, the global
2589 algorithm must take a more conservative view of block
2590 mode reads than the local alg does. So to get the case
2591 where you have a store to the frame followed by a non
0d52bcc1 2592 overlapping block mode read, we look at the active local
6fb5fa3c
DB
2593 stores at the end of the function and delete all of the
2594 frame and spill based ones. */
2595 if (stores_off_frame_dead_at_return
2596 && (EDGE_COUNT (bb->succs) == 0
2597 || (single_succ_p (bb)
fefa31b5 2598 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
e3b5732b 2599 && ! crtl->calls_eh_return)))
6fb5fa3c
DB
2600 {
2601 insn_info_t i_ptr = active_local_stores;
2602 while (i_ptr)
2603 {
fd6daec9 2604 store_info *store_info = i_ptr->store_rec;
6fb5fa3c
DB
2605
2606 /* Skip the clobbers. */
2607 while (!store_info->is_set)
2608 store_info = store_info->next;
ac6929b5
RB
2609 if (store_info->group_id >= 0)
2610 {
2611 group_info *group = rtx_group_vec[store_info->group_id];
2612 if (group->frame_related && !i_ptr->cannot_delete)
2613 delete_dead_store_insn (i_ptr);
2614 }
6fb5fa3c
DB
2615
2616 i_ptr = i_ptr->next_local_store;
2617 }
2618 }
2619
2620 /* Get rid of the loads that were discovered in
2621 replace_read. Cselib is finished with this block. */
2622 while (deferred_change_list)
2623 {
fd6daec9 2624 deferred_change *next = deferred_change_list->next;
6fb5fa3c
DB
2625
2626 /* There is no reason to validate this change. That was
2627 done earlier. */
2628 *deferred_change_list->loc = deferred_change_list->reg;
fb0b2914 2629 deferred_change_pool.remove (deferred_change_list);
6fb5fa3c
DB
2630 deferred_change_list = next;
2631 }
2632
2633 /* Get rid of all of the cselib based store_infos in this
2634 block and mark the containing insns as not being
2635 deletable. */
2636 ptr = bb_info->last_insn;
2637 while (ptr)
2638 {
2639 if (ptr->contains_cselib_groups)
8dd5516b 2640 {
fd6daec9 2641 store_info *s_info = ptr->store_rec;
8dd5516b
JJ
2642 while (s_info && !s_info->is_set)
2643 s_info = s_info->next;
2644 if (s_info
2645 && s_info->redundant_reason
2646 && s_info->redundant_reason->insn
2647 && !ptr->cannot_delete)
2648 {
456610d3 2649 if (dump_file && (dump_flags & TDF_DETAILS))
8dd5516b
JJ
2650 fprintf (dump_file, "Locally deleting insn %d "
2651 "because insn %d stores the "
2652 "same value and couldn't be "
2653 "eliminated\n",
2654 INSN_UID (ptr->insn),
2655 INSN_UID (s_info->redundant_reason->insn));
2656 delete_dead_store_insn (ptr);
2657 }
8dd5516b
JJ
2658 free_store_info (ptr);
2659 }
2660 else
2661 {
fd6daec9 2662 store_info *s_info;
8dd5516b
JJ
2663
2664 /* Free at least positions_needed bitmaps. */
2665 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2666 if (s_info->is_large)
2667 {
dc491a25 2668 BITMAP_FREE (s_info->positions_needed.large.bmap);
8dd5516b
JJ
2669 s_info->is_large = false;
2670 }
2671 }
6fb5fa3c
DB
2672 ptr = ptr->prev_insn;
2673 }
2674
da6603c6 2675 cse_store_info_pool.release ();
6fb5fa3c 2676 }
02b47899 2677 bb_info->regs_live = NULL;
6fb5fa3c
DB
2678 }
2679
02b47899 2680 BITMAP_FREE (regs_live);
6fb5fa3c 2681 cselib_finish ();
c203e8a7 2682 rtx_group_table->empty ();
6fb5fa3c
DB
2683}
2684
2685\f
2686/*----------------------------------------------------------------------------
2687 Second step.
2688
2689 Assign each byte position in the stores that we are going to
2690 analyze globally to a position in the bitmaps. Returns true if
6ed3da00 2691 there are any bit positions assigned.
6fb5fa3c
DB
2692----------------------------------------------------------------------------*/
2693
2694static void
2695dse_step2_init (void)
2696{
2697 unsigned int i;
fd6daec9 2698 group_info *group;
6fb5fa3c 2699
9771b263 2700 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
6fb5fa3c
DB
2701 {
2702 /* For all non stack related bases, we only consider a store to
2703 be deletable if there are two or more stores for that
2704 position. This is because it takes one store to make the
2705 other store redundant. However, for the stores that are
2706 stack related, we consider them if there is only one store
2707 for the position. We do this because the stack related
2708 stores can be deleted if there is no read between them and
2709 the end of the function.
b8698a0f 2710
6fb5fa3c
DB
2711 To make this work in the current framework, for the stack
2712 related bases we add all of the bits from store1 into store2.
2713 This has the effect of making them eligible even if there is
2714 only one store. */
2715
2716 if (stores_off_frame_dead_at_return && group->frame_related)
2717 {
2718 bitmap_ior_into (group->store2_n, group->store1_n);
2719 bitmap_ior_into (group->store2_p, group->store1_p);
456610d3 2720 if (dump_file && (dump_flags & TDF_DETAILS))
b8698a0f 2721 fprintf (dump_file, "group %d is frame related ", i);
6fb5fa3c
DB
2722 }
2723
2724 group->offset_map_size_n++;
3f9b14ff
SB
2725 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2726 group->offset_map_size_n);
6fb5fa3c 2727 group->offset_map_size_p++;
3f9b14ff
SB
2728 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2729 group->offset_map_size_p);
6fb5fa3c 2730 group->process_globally = false;
456610d3 2731 if (dump_file && (dump_flags & TDF_DETAILS))
6fb5fa3c 2732 {
b8698a0f 2733 fprintf (dump_file, "group %d(%d+%d): ", i,
6fb5fa3c
DB
2734 (int)bitmap_count_bits (group->store2_n),
2735 (int)bitmap_count_bits (group->store2_p));
2736 bitmap_print (dump_file, group->store2_n, "n ", " ");
2737 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2738 }
2739 }
2740}
2741
2742
ac6929b5 2743/* Init the offset tables. */
6fb5fa3c
DB
2744
2745static bool
ac6929b5 2746dse_step2 (void)
6fb5fa3c
DB
2747{
2748 unsigned int i;
fd6daec9 2749 group_info *group;
6fb5fa3c
DB
2750 /* Position 0 is unused because 0 is used in the maps to mean
2751 unused. */
2752 current_position = 1;
9771b263 2753 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
6fb5fa3c
DB
2754 {
2755 bitmap_iterator bi;
2756 unsigned int j;
2757
c3284718
RS
2758 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2759 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
6fb5fa3c
DB
2760 bitmap_clear (group->group_kill);
2761
2762 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2763 {
2764 bitmap_set_bit (group->group_kill, current_position);
d26c7090
ER
2765 if (bitmap_bit_p (group->escaped_n, j))
2766 bitmap_set_bit (kill_on_calls, current_position);
6fb5fa3c
DB
2767 group->offset_map_n[j] = current_position++;
2768 group->process_globally = true;
2769 }
2770 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2771 {
b8698a0f 2772 bitmap_set_bit (group->group_kill, current_position);
d26c7090
ER
2773 if (bitmap_bit_p (group->escaped_p, j))
2774 bitmap_set_bit (kill_on_calls, current_position);
6fb5fa3c
DB
2775 group->offset_map_p[j] = current_position++;
2776 group->process_globally = true;
2777 }
2778 }
2779 return current_position != 1;
2780}
2781
2782
6fb5fa3c
DB
2783\f
2784/*----------------------------------------------------------------------------
2785 Third step.
b8698a0f 2786
6fb5fa3c
DB
2787 Build the bit vectors for the transfer functions.
2788----------------------------------------------------------------------------*/
2789
2790
6fb5fa3c
DB
2791/* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2792 there, return 0. */
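/* Negative offsets (e.g. slots below the group's base address) are
   looked up in OFFSET_MAP_N and non-negative ones in OFFSET_MAP_P;
   for instance, offset -4 maps to offset_map_n[4]. */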
2793
2794static int
fd6daec9 2795get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
6fb5fa3c
DB
2796{
2797 if (offset < 0)
2798 {
2799 HOST_WIDE_INT offset_p = -offset;
2800 if (offset_p >= group_info->offset_map_size_n)
2801 return 0;
2802 return group_info->offset_map_n[offset_p];
2803 }
2804 else
2805 {
2806 if (offset >= group_info->offset_map_size_p)
2807 return 0;
2808 return group_info->offset_map_p[offset];
2809 }
2810}
2811
2812
2813/* Process the STORE_INFOs into the GEN and KILL bitmaps. KILL
2814 may be NULL. */
2815
b8698a0f 2816static void
ac6929b5 2817scan_stores (store_info *store_info, bitmap gen, bitmap kill)
6fb5fa3c
DB
2818{
2819 while (store_info)
2820 {
2821 HOST_WIDE_INT i;
fd6daec9 2822 group_info *group_info
9771b263 2823 = rtx_group_vec[store_info->group_id];
6fb5fa3c 2824 if (group_info->process_globally)
71d6a386
RS
2825 {
2826 HOST_WIDE_INT end = store_info->offset + store_info->width;
2827 for (i = store_info->offset; i < end; i++)
2828 {
2829 int index = get_bitmap_index (group_info, i);
2830 if (index != 0)
2831 {
2832 bitmap_set_bit (gen, index);
2833 if (kill)
2834 bitmap_clear_bit (kill, index);
2835 }
2836 }
2837 }
6fb5fa3c
DB
2838 store_info = store_info->next;
2839 }
2840}
2841
2842
6fb5fa3c
DB
2843/* Process the READ_INFOs into the GEN and KILL bitmaps. KILL
2844 may be NULL. */
2845
2846static void
ac6929b5 2847scan_reads (insn_info_t insn_info, bitmap gen, bitmap kill)
6fb5fa3c
DB
2848{
2849 read_info_t read_info = insn_info->read_rec;
2850 int i;
fd6daec9 2851 group_info *group;
6fb5fa3c 2852
64520bdc
EB
2853 /* If this insn reads the frame, kill all the frame related stores. */
2854 if (insn_info->frame_read)
2855 {
9771b263 2856 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
64520bdc
EB
2857 if (group->process_globally && group->frame_related)
2858 {
2859 if (kill)
2860 bitmap_ior_into (kill, group->group_kill);
b8698a0f 2861 bitmap_and_compl_into (gen, group->group_kill);
64520bdc
EB
2862 }
2863 }
d26c7090
ER
2864 if (insn_info->non_frame_wild_read)
2865 {
2866 /* Kill all non-frame related stores. Kill all stores of variables that
2867 escape. */
2868 if (kill)
2869 bitmap_ior_into (kill, kill_on_calls);
2870 bitmap_and_compl_into (gen, kill_on_calls);
9771b263 2871 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
d26c7090
ER
2872 if (group->process_globally && !group->frame_related)
2873 {
2874 if (kill)
2875 bitmap_ior_into (kill, group->group_kill);
2876 bitmap_and_compl_into (gen, group->group_kill);
2877 }
2878 }
6fb5fa3c
DB
2879 while (read_info)
2880 {
9771b263 2881 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
6fb5fa3c
DB
2882 {
2883 if (group->process_globally)
2884 {
2885 if (i == read_info->group_id)
2886 {
71d6a386 2887 if (!known_size_p (read_info->width))
6fb5fa3c 2888 {
71d6a386 2889 /* Handle block mode reads. */
6fb5fa3c
DB
2890 if (kill)
2891 bitmap_ior_into (kill, group->group_kill);
2892 bitmap_and_compl_into (gen, group->group_kill);
2893 }
2894 else
2895 {
2896 /* The groups are the same, just process the
2897 offsets. */
2898 HOST_WIDE_INT j;
71d6a386
RS
2899 HOST_WIDE_INT end = read_info->offset + read_info->width;
2900 for (j = read_info->offset; j < end; j++)
6fb5fa3c
DB
2901 {
2902 int index = get_bitmap_index (group, j);
2903 if (index != 0)
2904 {
2905 if (kill)
2906 bitmap_set_bit (kill, index);
2907 bitmap_clear_bit (gen, index);
2908 }
2909 }
2910 }
2911 }
2912 else
2913 {
2914 /* The groups are different, if the alias sets
2915 conflict, clear the entire group. We only need
2916 to apply this test if the read_info is a cselib
2917 read. Anything with a constant base cannot alias
2918 something else with a different constant
2919 base. */
2920 if ((read_info->group_id < 0)
b8698a0f 2921 && canon_true_dependence (group->base_mem,
d32f725a 2922 GET_MODE (group->base_mem),
6216f94e 2923 group->canon_base_addr,
53d9622b 2924 read_info->mem, NULL_RTX))
6fb5fa3c
DB
2925 {
2926 if (kill)
2927 bitmap_ior_into (kill, group->group_kill);
2928 bitmap_and_compl_into (gen, group->group_kill);
2929 }
2930 }
2931 }
2932 }
b8698a0f 2933
6fb5fa3c
DB
2934 read_info = read_info->next;
2935 }
2936}
2937
6fb5fa3c
DB
2938
2939/* Return the insn in BB_INFO before the first wild read or if there
2940 are no wild reads in the block, return the last insn. */
2941
2942static insn_info_t
2943find_insn_before_first_wild_read (bb_info_t bb_info)
2944{
2945 insn_info_t insn_info = bb_info->last_insn;
2946 insn_info_t last_wild_read = NULL;
2947
2948 while (insn_info)
2949 {
2950 if (insn_info->wild_read)
2951 {
2952 last_wild_read = insn_info->prev_insn;
2953 /* Block starts with wild read. */
2954 if (!last_wild_read)
2955 return NULL;
2956 }
2957
2958 insn_info = insn_info->prev_insn;
2959 }
2960
2961 if (last_wild_read)
2962 return last_wild_read;
2963 else
2964 return bb_info->last_insn;
2965}
2966
2967
2968/* Scan the insns in BB_INFO starting at PTR and going to the top of
2969 the block in order to build the gen and kill sets for the block.
2970 We start at ptr which may be the last insn in the block or may be
2971 the first insn with a wild read. In the latter case we are able to
2972 skip the rest of the block because it just does not matter:
2973 anything that happens is hidden by the wild read. */
2974
2975static void
ac6929b5 2976dse_step3_scan (basic_block bb)
6fb5fa3c
DB
2977{
2978 bb_info_t bb_info = bb_table[bb->index];
2979 insn_info_t insn_info;
2980
ac6929b5 2981 insn_info = find_insn_before_first_wild_read (bb_info);
b8698a0f 2982
6fb5fa3c
DB
2983 /* In the spill case or in the no_spill case if there is no wild
2984 read in the block, we will need a kill set. */
2985 if (insn_info == bb_info->last_insn)
2986 {
2987 if (bb_info->kill)
2988 bitmap_clear (bb_info->kill);
2989 else
3f9b14ff 2990 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c 2991 }
b8698a0f 2992 else
6fb5fa3c
DB
2993 if (bb_info->kill)
2994 BITMAP_FREE (bb_info->kill);
2995
2996 while (insn_info)
2997 {
2998 /* There may have been code deleted by the dce pass run before
2999 this phase. */
3000 if (insn_info->insn && INSN_P (insn_info->insn))
3001 {
ac6929b5
RB
3002 scan_stores (insn_info->store_rec, bb_info->gen, bb_info->kill);
3003 scan_reads (insn_info, bb_info->gen, bb_info->kill);
b8698a0f 3004 }
6fb5fa3c
DB
3005
3006 insn_info = insn_info->prev_insn;
3007 }
3008}
3009
3010
3011/* Set the gen set of the exit block, and also any block with no
3012 successors that does not have a wild read. */
3013
3014static void
3015dse_step3_exit_block_scan (bb_info_t bb_info)
3016{
3017 /* The gen set is all 0's for the exit block except for the
3018 frame_pointer_group. */
b8698a0f 3019
6fb5fa3c
DB
3020 if (stores_off_frame_dead_at_return)
3021 {
3022 unsigned int i;
fd6daec9 3023 group_info *group;
b8698a0f 3024
9771b263 3025 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
6fb5fa3c
DB
3026 {
3027 if (group->process_globally && group->frame_related)
3028 bitmap_ior_into (bb_info->gen, group->group_kill);
3029 }
3030 }
3031}
3032
3033
3034/* Find all of the blocks that are not backwards reachable from the
3035 exit block or any block with no successors (BB). These are the
3036 infinite loops or infinite self loops. These blocks will still
3037 have their bits set in UNREACHABLE_BLOCKS. */
3038
3039static void
3040mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3041{
3042 edge e;
3043 edge_iterator ei;
3044
d7c028c0 3045 if (bitmap_bit_p (unreachable_blocks, bb->index))
6fb5fa3c 3046 {
d7c028c0 3047 bitmap_clear_bit (unreachable_blocks, bb->index);
6fb5fa3c 3048 FOR_EACH_EDGE (e, ei, bb->preds)
b8698a0f 3049 {
6fb5fa3c 3050 mark_reachable_blocks (unreachable_blocks, e->src);
b8698a0f 3051 }
6fb5fa3c
DB
3052 }
3053}
3054
3055/* Build the transfer functions for the function. */
3056
3057static void
ac6929b5 3058dse_step3 ()
6fb5fa3c
DB
3059{
3060 basic_block bb;
6fb5fa3c
DB
3061 sbitmap_iterator sbi;
3062 bitmap all_ones = NULL;
3063 unsigned int i;
b8698a0f 3064
7ba9e72d 3065 auto_sbitmap unreachable_blocks (last_basic_block_for_fn (cfun));
f61e445a 3066 bitmap_ones (unreachable_blocks);
6fb5fa3c 3067
04a90bec 3068 FOR_ALL_BB_FN (bb, cfun)
6fb5fa3c
DB
3069 {
3070 bb_info_t bb_info = bb_table[bb->index];
3071 if (bb_info->gen)
3072 bitmap_clear (bb_info->gen);
3073 else
3f9b14ff 3074 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c
DB
3075
3076 if (bb->index == ENTRY_BLOCK)
3077 ;
3078 else if (bb->index == EXIT_BLOCK)
3079 dse_step3_exit_block_scan (bb_info);
3080 else
ac6929b5 3081 dse_step3_scan (bb);
6fb5fa3c
DB
3082 if (EDGE_COUNT (bb->succs) == 0)
3083 mark_reachable_blocks (unreachable_blocks, bb);
3084
3085 /* If this is the second time dataflow is run, delete the old
3086 sets. */
3087 if (bb_info->in)
3088 BITMAP_FREE (bb_info->in);
3089 if (bb_info->out)
3090 BITMAP_FREE (bb_info->out);
3091 }
3092
3093 /* For any block in an infinite loop, we must initialize the out set
3094 to all ones. This could be expensive, but almost never occurs in
3095 practice. However, it is common in regression tests. */
d4ac4ce2 3096 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
6fb5fa3c
DB
3097 {
3098 if (bitmap_bit_p (all_blocks, i))
3099 {
3100 bb_info_t bb_info = bb_table[i];
3101 if (!all_ones)
3102 {
3103 unsigned int j;
fd6daec9 3104 group_info *group;
6fb5fa3c 3105
3f9b14ff 3106 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
9771b263 3107 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
6fb5fa3c
DB
3108 bitmap_ior_into (all_ones, group->group_kill);
3109 }
3110 if (!bb_info->out)
3111 {
3f9b14ff 3112 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c
DB
3113 bitmap_copy (bb_info->out, all_ones);
3114 }
3115 }
3116 }
3117
3118 if (all_ones)
3119 BITMAP_FREE (all_ones);
6fb5fa3c
DB
3120}
3121
3122
3123\f
3124/*----------------------------------------------------------------------------
3125 Fourth step.
3126
3127 Solve the bitvector equations.
3128----------------------------------------------------------------------------*/
3129
3130
3131/* Confluence function for blocks with no successors. Create an out
3132 set from the gen set of the exit block. This block logically has
3133 the exit block as a successor. */
3134
3135
3136
3137static void
3138dse_confluence_0 (basic_block bb)
3139{
3140 bb_info_t bb_info = bb_table[bb->index];
3141
3142 if (bb->index == EXIT_BLOCK)
3143 return;
3144
3145 if (!bb_info->out)
3146 {
3f9b14ff 3147 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c
DB
3148 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3149 }
3150}
3151
3152/* Propagate the information from the in set of the dest of E to the
3153 out set of the src of E. If the various in or out sets are not
3154 there, that means they are all ones. */
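/* Concretely, the confluence below computes OUT (src) &= IN (dest),
   so a byte position stays marked (i.e. a store to it here would be
   dead) only if that holds along every successor edge. */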
3155
1a0f3fa1 3156static bool
6fb5fa3c
DB
3157dse_confluence_n (edge e)
3158{
3159 bb_info_t src_info = bb_table[e->src->index];
3160 bb_info_t dest_info = bb_table[e->dest->index];
3161
3162 if (dest_info->in)
3163 {
3164 if (src_info->out)
3165 bitmap_and_into (src_info->out, dest_info->in);
3166 else
3167 {
3f9b14ff 3168 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c
DB
3169 bitmap_copy (src_info->out, dest_info->in);
3170 }
3171 }
1a0f3fa1 3172 return true;
6fb5fa3c
DB
3173}
3174
3175
3176/* Propagate the info from the out to the in set of BB_INDEX's basic
b8698a0f 3177 block. There are three cases:
6fb5fa3c
DB
3178
3179 1) The block has no kill set. In this case the kill set is all
3180 ones. It does not matter what the out set of the block is, none of
3181 the info can reach the top. The only thing that reaches the top is
3182 the gen set and we just copy the set.
3183
3184 2) There is a kill set but no out set and bb has successors. In
3185 this case we just return. Eventually an out set will be created and
3186 it is better to wait than to create a set of ones.
3187
3188 3) There is both a kill and out set. We apply the obvious transfer
3189 function.
3190*/
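/* In case 3 the transfer function computed below by
   bitmap_ior_and_compl is IN = GEN | (OUT & ~KILL). */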
3191
3192static bool
3193dse_transfer_function (int bb_index)
3194{
3195 bb_info_t bb_info = bb_table[bb_index];
3196
3197 if (bb_info->kill)
3198 {
3199 if (bb_info->out)
3200 {
3201 /* Case 3 above. */
3202 if (bb_info->in)
b8698a0f 3203 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
6fb5fa3c
DB
3204 bb_info->out, bb_info->kill);
3205 else
3206 {
3f9b14ff 3207 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
b8698a0f 3208 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
6fb5fa3c
DB
3209 bb_info->out, bb_info->kill);
3210 return true;
3211 }
3212 }
3213 else
3214 /* Case 2 above. */
3215 return false;
3216 }
3217 else
3218 {
3219 /* Case 1 above. If there is already an in set, nothing
3220 happens. */
3221 if (bb_info->in)
3222 return false;
3223 else
3224 {
3f9b14ff 3225 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
6fb5fa3c
DB
3226 bitmap_copy (bb_info->in, bb_info->gen);
3227 return true;
3228 }
3229 }
3230}
3231
3232/* Solve the dataflow equations. */
3233
static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
                      dse_confluence_n, dse_transfer_function,
                      all_blocks, df_get_postorder (DF_BACKWARD),
                      df_get_n_blocks (DF_BACKWARD));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB_FN (bb, cfun)
        {
          bb_info_t bb_info = bb_table[bb->index];

          df_print_bb_index (bb, dump_file);
          if (bb_info->in)
            bitmap_print (dump_file, bb_info->in, " in: ", "\n");
          else
            fprintf (dump_file, " in: *MISSING*\n");
          if (bb_info->gen)
            bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
          else
            fprintf (dump_file, " gen: *MISSING*\n");
          if (bb_info->kill)
            bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
          else
            fprintf (dump_file, " kill: *MISSING*\n");
          if (bb_info->out)
            bitmap_print (dump_file, bb_info->out, " out: ", "\n");
          else
            fprintf (dump_file, " out: *MISSING*\n\n");
        }
    }
}


/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/


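/* A store is deleted here only if every byte position it writes
   (positions [offset, offset + width) within its group) still has its
   bit set in the block's running out set V; get_bitmap_index returning
   zero means the position is not tracked, which blocks the deletion.  */
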
static void
dse_step5 (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          if (dump_file && insn_info->insn)
            {
              fprintf (dump_file, "starting to process insn %d\n",
                       INSN_UID (insn_info->insn));
              bitmap_print (dump_file, v, " v: ", "\n");
            }

          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              store_info *store_info = insn_info->store_rec;

              /* Try to delete the current insn.  */
              deleted = true;

              /* Skip the clobbers.  */
              while (!store_info->is_set)
                store_info = store_info->next;

              HOST_WIDE_INT i;
              group_info *group_info = rtx_group_vec[store_info->group_id];

              HOST_WIDE_INT end = store_info->offset + store_info->width;
              for (i = store_info->offset; i < end; i++)
                {
                  int index = get_bitmap_index (group_info, i);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
                  if (index == 0 || !bitmap_bit_p (v, index))
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "failing at i = %d\n", (int)i);
                      deleted = false;
                      break;
                    }
                }
              if (deleted)
                {
                  if (dbg_cnt (dse)
                      && check_for_inc_dec_1 (insn_info))
                    {
                      delete_insn (insn_info->insn);
                      insn_info->insn = NULL;
                      globally_deleted++;
                    }
                }
            }
          /* We do not want to process the local info if the insn was
             deleted.  For instance, if the insn did a wild read, we
             no longer need to trash the info.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores (insn_info->store_rec, v, NULL);
              if (insn_info->wild_read)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "wild read\n");
                  bitmap_clear (v);
                }
              else if (insn_info->read_rec
                       || insn_info->non_frame_wild_read
                       || insn_info->frame_read)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      if (!insn_info->non_frame_wild_read
                          && !insn_info->frame_read)
                        fprintf (dump_file, "regular read\n");
                      if (insn_info->non_frame_wild_read)
                        fprintf (dump_file, "non-frame wild read\n");
                      if (insn_info->frame_read)
                        fprintf (dump_file, "frame read\n");
                    }
                  scan_reads (insn_info, v, NULL);
                }
            }

          insn_info = insn_info->prev_insn;
        }
    }
}



/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/

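/* For instance (the aliasing details having been settled when the
   stores were recorded), in a sequence such as

       *p = x;   ...   *p = x;

   where the first store could not itself be removed, the second store
   writes a value the location already holds and is deleted here.  */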
static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info *s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx_insn *rinsn = s_info->redundant_reason->insn;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Locally deleting insn %d "
                             "because insn %d stores the "
                             "same value and couldn't be "
                             "eliminated\n",
                             INSN_UID (insn_info->insn),
                             INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}

/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  end_alias_analysis ();
  free (bb_table);
  delete rtx_group_table;
  rtx_group_table = NULL;
  rtx_group_vec.release ();
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  rtx_store_info_pool.release ();
  read_info_type_pool.release ();
  insn_info_type_pool.release ();
  dse_bb_info_type_pool.release ();
  group_info_pool.release ();
  deferred_change_pool.release ();
}


/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2 ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "doing global processing\n");
      dse_step3 ();
      dse_step4 ();
      dse_step5 ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d\n",
             locally_deleted, globally_deleted);

  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

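/* Two instances of this pass are registered below: "dse1" and "dse2".
   Both run the same rest_of_handle_dse driver and differ only in their
   timevar and debug counter; in the standard pass pipeline (see
   passes.def) dse1 runs before register allocation and dse2 runs after
   it.  */
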
namespace {

const pass_data pass_data_rtl_dse1 =
{
  RTL_PASS, /* type */
  "dse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE1, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse1 : public rtl_opt_pass
{
public:
  pass_rtl_dse1 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse1);
    }

  virtual unsigned int execute (function *) { return rest_of_handle_dse (); }

}; // class pass_rtl_dse1

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse1 (gcc::context *ctxt)
{
  return new pass_rtl_dse1 (ctxt);
}

namespace {

const pass_data pass_data_rtl_dse2 =
{
  RTL_PASS, /* type */
  "dse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse2 : public rtl_opt_pass
{
public:
  pass_rtl_dse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse2);
    }

  virtual unsigned int execute (function *) { return rest_of_handle_dse (); }

}; // class pass_rtl_dse2

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse2 (gcc::context *ctxt)
{
  return new pass_rtl_dse2 (ctxt);
}